def connect(self):
    """
    Opens a direct connection to the Microsoft SQL Server back-end via
    pymssql and initializes the cursor.

    Raises SqlmapConnectionException on any driver or parameter error.
    """
    self.initConnection()

    try:
        # pymssql expects the server as a single "host:port" string
        self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout)
    except (pymssql.Error, _mssql.MssqlDatabaseException) as ex:
        raise SqlmapConnectionException(ex)
    except ValueError:
        # Fix: original raised the bare exception class with no message;
        # give the user something actionable (e.g. bad port value)
        raise SqlmapConnectionException("invalid connection parameters")

    self.initCursor()
    self.printConnected()
def connect(self):
    """
    Connects to a Microsoft Access database file through the pyodbc
    MDB driver; supported on Windows platforms only.
    """
    if not IS_WIN:
        errMsg = "currently, direct connection to Microsoft Access database(s) "
        errMsg += "is restricted to Windows platforms"
        raise SqlmapUnsupportedFeatureException(errMsg)

    self.initConnection()
    # self.db holds the path of the .mdb file being targeted
    self.checkFileDb()

    try:
        self.connector = pyodbc.connect('Driver={Microsoft Access Driver (*.mdb)};Dbq=%s;Uid=Admin;Pwd=;' % self.db)
    except (pyodbc.Error, pyodbc.OperationalError) as ex:
        raise SqlmapConnectionException(getSafeExString(ex))

    self.initCursor()
    self.printConnected()
def _get_cursor(self):
    """
    Ensures that the current thread has an open cursor to the session
    SQLite file, creating the backing "storage" table on first use.
    The cursor is cached on the thread-local data object.
    """
    threadData = getCurrentThreadData()

    if threadData.hashDBCursor is None:
        try:
            # isolation_level=None puts sqlite3 in autocommit mode
            connection = sqlite3.connect(self.filepath, timeout=3, isolation_level=None)
            threadData.hashDBCursor = connection.cursor()
            threadData.hashDBCursor.execute("CREATE TABLE IF NOT EXISTS storage (id INTEGER PRIMARY KEY, value TEXT)")
            connection.commit()
        # Fix: Py2-only "except E, ex" syntax replaced with the "as"
        # form already used elsewhere in this codebase
        except Exception as ex:
            errMsg = "error occurred while opening a session "
            errMsg += "file '%s' ('%s')" % (self.filepath, getSafeExString(ex))
            raise SqlmapConnectionException(errMsg)
def connect(self):
    """
    Connects directly to the Firebird back-end via kinterbasdb.
    """
    self.initConnection()

    if not self.hostname:
        self.checkFileDb()

    try:
        # Reference: http://www.daniweb.com/forums/thread248499.html
        # NOTE(review): if self.hostname/self.user/self.password were
        # None here, .encode() would raise AttributeError, which is not
        # caught below -- confirm callers always provide string values
        self.connector = kinterbasdb.connect(host=self.hostname.encode(UNICODE_ENCODING), database=self.db.encode(UNICODE_ENCODING), user=self.user.encode(UNICODE_ENCODING), password=self.password.encode(UNICODE_ENCODING), charset="UTF8")
    except kinterbasdb.OperationalError as ex:
        raise SqlmapConnectionException(getSafeExString(ex))

    self.initCursor()
    self.printConnected()
def retrieve(self, key, unserialize=False):
    """
    Retrieves the value stored under the given key: first from the
    in-memory write cache, then from the session SQLite file, retrying
    a bounded number of times on transient database errors. With
    unserialize=True the stored value is deserialized before return.
    Returns None when the key is absent or on unrecoverable decode
    errors.
    """
    retVal = None

    if key and (self._write_cache or os.path.isfile(self.filepath)):
        hash_ = HashDB.hashKey(key)
        # pending (not yet flushed) writes take precedence over the file
        retVal = self._write_cache.get(hash_)

        if not retVal:
            for _ in xrange(HASHDB_RETRIEVE_RETRIES):
                try:
                    for row in self.cursor.execute("SELECT value FROM storage WHERE id=?", (hash_, )):
                        retVal = row[0]
                except (sqlite3.OperationalError, sqlite3.DatabaseError) as ex:
                    if any(_ in getSafeExString(ex) for _ in ("locked", "no such table")):
                        # transient (e.g. concurrent access) -- warn once, then retry
                        warnMsg = "problem occurred while accessing session file '%s' ('%s')" % (self.filepath, getSafeExString(ex))
                        singleTimeWarnMessage(warnMsg)
                    elif "Could not decode" in getSafeExString(ex):
                        # corrupted value; give up on this key silently
                        break
                    else:
                        errMsg = "error occurred while accessing session file '%s' ('%s'). " % (self.filepath, getSafeExString(ex))
                        errMsg += "If the problem persists please rerun with '--flush-session'"
                        raise SqlmapConnectionException(errMsg)
                else:
                    # successful read -- stop retrying
                    break

                time.sleep(1)

    if retVal and unserialize:
        try:
            retVal = unserializeObject(retVal)
        except:
            # stale/corrupted serialized payload; treat as a miss
            retVal = None
            warnMsg = "error occurred while unserializing value for session key '%s'. " % key
            warnMsg += "If the problem persists please rerun with '--flush-session'"
            logger.warn(warnMsg)

    return retVal
def _search(dork):
    """
    This method performs the effective search on Google providing
    the google dork and the Google session cookie
    """
    if not dork:
        return None

    headers = {}

    # reuse the configured User-Agent (or a dummy one) so the search
    # request looks like regular browser traffic
    headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
    headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE

    try:
        req = urllib2.Request("https://www.google.com/ncr", headers=headers)
        conn = urllib2.urlopen(req)
    # Fix: Py2-only "except E, ex" syntax replaced with the "as" form
    # already used elsewhere in this codebase
    except Exception as ex:
        errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
        raise SqlmapConnectionException(errMsg)
def connect(self):
    """
    Connects to the Oracle back-end via cx_Oracle, first attempting a
    privileged SYSDBA connection and falling back to a regular one.
    """
    self.initConnection()
    self.__dsn = cx_Oracle.makedsn(self.hostname, self.port, self.db)
    self.__dsn = utf8encode(self.__dsn)
    self.user = utf8encode(self.user)
    self.password = utf8encode(self.password)

    try:
        self.connector = cx_Oracle.connect(dsn=self.__dsn, user=self.user, password=self.password, mode=cx_Oracle.SYSDBA)
        logger.info("successfully connected as SYSDBA")
    except (cx_Oracle.OperationalError, cx_Oracle.DatabaseError):
        try:
            self.connector = cx_Oracle.connect(dsn=self.__dsn, user=self.user, password=self.password)
        # Fix: Py2-only "except E, msg" syntax replaced with "as" form
        except (cx_Oracle.OperationalError, cx_Oracle.DatabaseError) as msg:
            raise SqlmapConnectionException(msg)
class HTTPSConnection(httplib.HTTPSConnection):
    """
    Connection class that enables usage of newer SSL protocols.

    Reference: http://bugs.python.org/msg128686
    """

    def __init__(self, *args, **kwargs):
        httplib.HTTPSConnection.__init__(self, *args, **kwargs)

    def connect(self):
        def create_sock():
            # fresh TCP socket; performs proxy CONNECT tunneling when
            # _tunnel_host has been set by the handler
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, "_tunnel_host", None):
                self.sock = sock
                self._tunnel()
            return sock

        success = False

        for protocol in _protocols:
            try:
                sock = create_sock()
                _ = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=protocol)
                if _:
                    success = True
                    self.sock = _
                    # promote the working protocol so subsequent
                    # connections try it first
                    _protocols.remove(protocol)
                    _protocols.insert(0, protocol)
                    break
                else:
                    sock.close()
            # Fix: Py2-only "except E, errMsg" syntax replaced with
            # the "as" form already used elsewhere in this codebase
            except (ssl.SSLError, socket.error, httplib.BadStatusLine) as errMsg:
                self._tunnel_host = None
                logger.debug("SSL connection error occurred ('%s')" % errMsg)

        if not success:
            raise SqlmapConnectionException("can't establish SSL connection")
class Google(object):
    """
    This class defines methods used to perform Google dorking (command
    line option '-g <google dork>'
    """

    def __init__(self, handlers):
        self._cj = cookielib.CookieJar()

        handlers.append(urllib2.HTTPCookieProcessor(self._cj))

        self.opener = urllib2.build_opener(*handlers)
        self.opener.addheaders = conf.httpHeaders

        try:
            conn = self.opener.open("http://www.google.com/ncr")
            conn.info()  # retrieve session cookie
        # Fix: Py2-only "except E, e" syntax replaced with the "as"
        # form already used elsewhere in this codebase
        except urllib2.HTTPError as e:
            # even an HTTP error response carries the session cookie
            e.info()
        except urllib2.URLError:
            errMsg = "unable to connect to Google"
            raise SqlmapConnectionException(errMsg)
def connect(self):
    """
    Establishes a SQLAlchemy engine connection from the direct
    connection string (conf.direct), handling the SQLite file-path
    case specially (absolute path, same-thread check disabled).
    """
    if _sqlalchemy:
        self.initConnection()

        try:
            if not self.port and self.db:
                if not os.path.exists(self.db):
                    # Fix: Py2-only "raise E, msg" statement replaced
                    # with the call form (required for Py3)
                    raise SqlmapFilePathException("the provided database file '%s' does not exist" % self.db)

                _ = conf.direct.split("//", 1)
                conf.direct = "%s////%s" % (_[0], os.path.abspath(self.db))

            if self.dialect:
                conf.direct = conf.direct.replace(conf.dbms, self.dialect)

            engine = _sqlalchemy.create_engine(conf.direct, connect_args={'check_same_thread': False} if self.dialect == "sqlite" else {})
            self.connector = engine.connect()
        except SqlmapFilePathException:
            raise
        # Fix: Py2-only "except E, msg" syntax and Py2-only exception
        # indexing (msg[0]) replaced with "as" form and .args
        except Exception as ex:
            raise SqlmapConnectionException("SQLAlchemy connection issue ('%s')" % (ex.args[0] if ex.args else ex))

        self.printConnected()
class Connector(GenericConnector):
    """
    Homepage: http://code.google.com/p/pymysql/
    User guide: http://code.google.com/p/pymysql/
    API: http://code.google.com/p/pymysql/
    Debian package: <none>
    License: MIT

    Possible connectors: http://wiki.python.org/moin/MySQL
    """

    def __init__(self):
        GenericConnector.__init__(self)

    def connect(self):
        """
        Connects to the MySQL back-end via pymysql.
        """
        self.initConnection()

        try:
            self.connector = pymysql.connect(host=self.hostname, user=self.user, passwd=self.password, db=self.db, port=self.port, connect_timeout=conf.timeout, use_unicode=True)
        # Fix: Py2-only "except E, msg" syntax and msg[1] exception
        # indexing replaced with "as" form and .args (guarded against
        # single-argument exceptions)
        except (pymysql.OperationalError, pymysql.InternalError, pymysql.ProgrammingError) as ex:
            raise SqlmapConnectionException(ex.args[1] if len(ex.args) > 1 else ex)
        except struct.error as ex:
            raise SqlmapConnectionException(ex)
self.printConnected()

def fetchall(self):
    """
    Returns all rows of the last result set as a list of tuples, or
    None when the remote side reports a programming error.
    """
    try:
        retVal = []

        for row in self.cursor.fetchall():
            retVal.append(tuple(row))

        return retVal
    # Fix: Py2-only "except E, msg" syntax replaced with "as" form
    except _sqlalchemy.exc.ProgrammingError as ex:
        logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % ex.message if hasattr(ex, "message") else ex)
        return None

def execute(self, query):
    """
    Executes the given query via the SQLAlchemy connector. Operational
    and programming errors are logged; internal errors are re-raised
    as SqlmapConnectionException.
    """
    try:
        self.cursor = self.connector.execute(query)
    except (_sqlalchemy.exc.OperationalError, _sqlalchemy.exc.ProgrammingError) as ex:
        logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % ex.message if hasattr(ex, "message") else ex)
    # Fix: Py2-only msg[1] exception indexing replaced with .args
    except _sqlalchemy.exc.InternalError as ex:
        raise SqlmapConnectionException(ex.args[1] if len(ex.args) > 1 else ex)

def select(self, query):
    """
    Convenience wrapper: executes the query and fetches all results.
    """
    self.execute(query)
    return self.fetchall()
def setHandler():
    """
    Detect which is the target web application back-end database
    management system.
    """
    # (DBMS label, accepted aliases, fingerprint handler class,
    #  direct-connection connector class)
    items = [
        (DBMS.MYSQL, MYSQL_ALIASES, MySQLMap, MySQLConn),
        (DBMS.ORACLE, ORACLE_ALIASES, OracleMap, OracleConn),
        (DBMS.PGSQL, PGSQL_ALIASES, PostgreSQLMap, PostgreSQLConn),
        (DBMS.MSSQL, MSSQL_ALIASES, MSSQLServerMap, MSSQLServerConn),
        (DBMS.SQLITE, SQLITE_ALIASES, SQLiteMap, SQLiteConn),
        (DBMS.ACCESS, ACCESS_ALIASES, AccessMap, AccessConn),
        (DBMS.FIREBIRD, FIREBIRD_ALIASES, FirebirdMap, FirebirdConn),
        (DBMS.MAXDB, MAXDB_ALIASES, MaxDBMap, MaxDBConn),
        (DBMS.SYBASE, SYBASE_ALIASES, SybaseMap, SybaseConn),
        (DBMS.DB2, DB2_ALIASES, DB2Map, DB2Conn),
        (DBMS.HSQLDB, HSQLDB_ALIASES, HSQLDBMap, HSQLDBConn),
        (DBMS.H2, H2_ALIASES, H2Map, H2Conn),
        (DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
    ]

    # if a DBMS was supplied/identified/heuristically suggested, move
    # its entry to the front so it is tried first
    _ = max(_ if (conf.get("dbms") or Backend.getIdentifiedDbms() or kb.heuristicExtendedDbms or "").lower() in _[1] else () for _ in items)
    if _:
        items.remove(_)
        items.insert(0, _)

    for dbms, aliases, Handler, Connector in items:
        if conf.forceDbms:
            if conf.forceDbms.lower() not in aliases:
                continue
            else:
                kb.dbms = conf.dbms = conf.forceDbms = dbms

        if kb.dbmsFilter:
            if dbms not in kb.dbmsFilter:
                continue

        handler = Handler()
        conf.dbmsConnector = Connector()

        if conf.direct:
            exception = None
            dialect = DBMS_DICT[dbms][3]

            # prefer a SQLAlchemy connection when a dialect is known
            if dialect:
                try:
                    sqlalchemy = SQLAlchemy(dialect=dialect)
                    sqlalchemy.connect()

                    if sqlalchemy.connector:
                        conf.dbmsConnector = sqlalchemy
                except Exception as ex:
                    exception = ex

            # fall back to the native connector when there is no
            # dialect or SQLAlchemy failed
            if not dialect or exception:
                try:
                    conf.dbmsConnector.connect()
                except Exception as ex:
                    if exception:
                        # surface the earlier SQLAlchemy failure
                        raise exception
                    else:
                        if not isinstance(ex, NameError):
                            raise
                        else:
                            # NameError here means the native driver
                            # module is not installed
                            msg = "support for direct connection to '%s' is not available. " % dbms
                            msg += "Please rerun with '--dependencies'"
                            raise SqlmapConnectionException(msg)

        if conf.forceDbms == dbms or handler.checkDbms():
            if kb.resolutionDbms:
                conf.dbmsHandler = max(_ for _ in items if _[0] == kb.resolutionDbms)[2]()
                conf.dbmsHandler._dbms = kb.resolutionDbms
            else:
                conf.dbmsHandler = handler
                conf.dbmsHandler._dbms = dbms

            break
        else:
            conf.dbmsConnector = None

    # At this point back-end DBMS is correctly fingerprinted, no need
    # to enforce it anymore
    Backend.flushForcedDbms()
def connect(self):
    """
    Establishes an SSL/TLS-wrapped socket, first trying SNI-capable
    SSLContext handshakes (when supported and applicable) and falling
    back to plain ssl.wrap_socket across the known protocol list.
    """
    def create_sock():
        # fresh TCP socket; performs proxy CONNECT tunneling when
        # _tunnel_host has been set by the handler
        sock = socket.create_connection((self.host, self.port), self.timeout)
        if getattr(self, "_tunnel_host", None):
            self.sock = sock
            self._tunnel()
        return sock

    success = False

    # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext
    #               https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni

    # SNI attempt only for hostnames (not raw IPs), when not previously
    # marked as failing, and not behind a proxy/Tor
    if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) is not False and not any((conf.proxy, conf.tor)) and hasattr(ssl, "SSLContext"):
        for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols):
            try:
                sock = create_sock()
                context = ssl.SSLContext(protocol)
                _ = context.wrap_socket(sock, do_handshake_on_connect=True, server_hostname=self.host)
                if _:
                    success = True
                    self.sock = _
                    # promote the working protocol for future connections
                    _protocols.remove(protocol)
                    _protocols.insert(0, protocol)
                    break
                else:
                    sock.close()
            except (ssl.SSLError, socket.error, _http_client.BadStatusLine) as ex:
                self._tunnel_host = None
                logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex))

        # remember whether SNI worked for this host
        if kb.tlsSNI.get(self.host) is None:
            kb.tlsSNI[self.host] = success

    # non-SNI fallback over the full protocol list
    if not success:
        for protocol in _protocols:
            try:
                sock = create_sock()
                _ = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=protocol)
                if _:
                    success = True
                    self.sock = _
                    _protocols.remove(protocol)
                    _protocols.insert(0, protocol)
                    break
                else:
                    sock.close()
            except (ssl.SSLError, socket.error, _http_client.BadStatusLine) as ex:
                self._tunnel_host = None
                logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex))

    if not success:
        errMsg = "can't establish SSL connection"
        # Reference: https://docs.python.org/2/library/ssl.html
        if distutils.version.LooseVersion(PYVERSION) < distutils.version.LooseVersion("2.7.9"):
            errMsg += " (please retry with Python >= 2.7.9)"
        raise SqlmapConnectionException(errMsg)
_protocols.insert(0, protocol) break else: sock.close() except (ssl.SSLError, socket.error, httplib.BadStatusLine), ex: self._tunnel_host = None logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex)) if not success: errMsg = "can't establish SSL connection" # Reference: https://docs.python.org/2/library/ssl.html if distutils.version.LooseVersion( PYVERSION) < distutils.version.LooseVersion("2.7.9"): errMsg += " (please retry with Python >= 2.7.9)" raise SqlmapConnectionException(errMsg) class HTTPSHandler(urllib2.HTTPSHandler): def https_open(self, req): return self.do_open( HTTPSConnection if ssl else httplib.HTTPSConnection, req) # Bug fix (http://bugs.python.org/issue17849) def _(self, *args): return self._readline()
ssl_version=protocol) if _: success = True self.sock = _ _protocols.remove(protocol) _protocols.insert(0, protocol) break else: sock.close() except (ssl.SSLError, socket.error, httplib.BadStatusLine), ex: self._tunnel_host = None logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex)) if not success: raise SqlmapConnectionException("can't establish SSL connection") class HTTPSHandler(urllib2.HTTPSHandler): def https_open(self, req): return self.do_open( HTTPSConnection if ssl else httplib.HTTPSConnection, req) # Bug fix (http://bugs.python.org/issue17849) def _(self, *args): return self._readline()
'Driver={Microsoft Access Driver (*.mdb)};Dbq=%s;Uid=Admin;Pwd=;' % self.db) except (pyodbc.Error, pyodbc.OperationalError), msg: raise SqlmapConnectionException(getSafeExString(msg)) self.initCursor() self.printConnected() def fetchall(self): try: return self.cursor.fetchall() except pyodbc.ProgrammingError, msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % getSafeExString(msg)) return None def execute(self, query): try: self.cursor.execute(query) except (pyodbc.OperationalError, pyodbc.ProgrammingError), msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % getSafeExString(msg)) except pyodbc.Error, msg: raise SqlmapConnectionException(getSafeExString(msg)) self.connector.commit() def select(self, query): self.execute(query) return self.fetchall()
def connect(self):
    """
    Establishes an SSL/TLS-wrapped socket: SNI-capable cached contexts
    first (when supported), then plain ssl.wrap_socket fallback, and
    finally a bounded self-retry when a previous connection in this
    run succeeded (kb.sslSuccess).
    """
    def create_sock():
        # fresh TCP socket; performs proxy CONNECT tunneling when
        # _tunnel_host has been set by the handler
        sock = socket.create_connection((self.host, self.port), self.timeout)
        if getattr(self, "_tunnel_host", None):
            self.sock = sock
            self._tunnel()
        return sock

    success = False

    # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext
    #               https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni

    # SNI attempt only for hostnames (not raw IPs) and when not
    # previously marked as failing for this host
    if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) is not False and hasattr(ssl, "SSLContext"):
        for protocol in (_ for _ in _protocols if _ >= ssl.PROTOCOL_TLSv1):
            try:
                sock = create_sock()
                # contexts are cached per protocol in module-level _contexts
                if protocol not in _contexts:
                    _contexts[protocol] = ssl.SSLContext(protocol)

                    try:
                        # Reference(s): https://askubuntu.com/a/1263098
                        #               https://askubuntu.com/a/1250807
                        _contexts[protocol].set_ciphers("DEFAULT@SECLEVEL=1")
                    except ssl.SSLError:
                        pass

                result = _contexts[protocol].wrap_socket(sock, do_handshake_on_connect=True, server_hostname=self.host)
                if result:
                    success = True
                    self.sock = result
                    # promote the working protocol for future connections
                    _protocols.remove(protocol)
                    _protocols.insert(0, protocol)
                    break
                else:
                    sock.close()
            except (ssl.SSLError, socket.error, _http_client.BadStatusLine) as ex:
                self._tunnel_host = None
                logger.debug("SSL connection error occurred for '%s' ('%s')" % (_lut[protocol], getSafeExString(ex)))

        # remember whether SNI worked for this host
        if kb.tlsSNI.get(self.host) is None:
            kb.tlsSNI[self.host] = success

    # non-SNI fallback over the full protocol list
    if not success:
        for protocol in _protocols:
            try:
                sock = create_sock()
                _ = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=protocol)
                if _:
                    success = True
                    self.sock = _
                    _protocols.remove(protocol)
                    _protocols.insert(0, protocol)
                    break
                else:
                    sock.close()
            except (ssl.SSLError, socket.error, _http_client.BadStatusLine) as ex:
                self._tunnel_host = None
                logger.debug("SSL connection error occurred for '%s' ('%s')" % (_lut[protocol], getSafeExString(ex)))

    if not success:
        errMsg = "can't establish SSL connection"
        # Reference: https://docs.python.org/2/library/ssl.html
        if distutils.version.LooseVersion(PYVERSION) < distutils.version.LooseVersion("2.7.9"):
            errMsg += " (please retry with Python >= 2.7.9)"

        # if SSL worked earlier in this run, assume a transient failure
        # and retry the whole connect a bounded number of times
        if kb.sslSuccess and not self.retrying:
            self.retrying = True

            for _ in xrange(conf.retries):
                try:
                    self.connect()
                except SqlmapConnectionException:
                    pass
                else:
                    return

        raise SqlmapConnectionException(errMsg)
    else:
        kb.sslSuccess = True
def start(): """ This function calls a function that performs checks on both URL stability and all GET, POST, Cookie and User-Agent parameters to check if they are dynamic and SQL injection affected """ if conf.hashFile: crackHashFile(conf.hashFile) if conf.direct: initTargetEnv() setupTargetEnv() action() return True if conf.url and not any((conf.forms, conf.crawlDepth)): kb.targets.add((conf.url, conf.method, conf.data, conf.cookie, None)) if conf.configFile and not kb.targets: errMsg = "you did not edit the configuration file properly, set " errMsg += "the target URL, list of targets or google dork" logger.error(errMsg) return False if kb.targets and isListLike(kb.targets) and len(kb.targets) > 1: infoMsg = "found a total of %d targets" % len(kb.targets) logger.info(infoMsg) targetCount = 0 initialHeaders = list(conf.httpHeaders) for targetUrl, targetMethod, targetData, targetCookie, targetHeaders in kb.targets: targetCount += 1 try: if conf.checkInternet: infoMsg = "checking for Internet connection" logger.info(infoMsg) if not checkInternet(): warnMsg = "[%s] [WARNING] no connection detected" % time.strftime( "%X") dataToStdout(warnMsg) valid = False for _ in xrange(conf.retries): if checkInternet(): valid = True break else: dataToStdout('.') time.sleep(5) if not valid: errMsg = "please check your Internet connection and rerun" raise SqlmapConnectionException(errMsg) else: dataToStdout("\n") conf.url = targetUrl conf.method = targetMethod.upper().strip( ) if targetMethod else targetMethod conf.data = targetData conf.cookie = targetCookie conf.httpHeaders = list(initialHeaders) conf.httpHeaders.extend(targetHeaders or []) if conf.randomAgent or conf.mobile: for header, value in initialHeaders: if header.upper() == HTTP_HEADER.USER_AGENT.upper(): conf.httpHeaders.append((header, value)) break if conf.data: # Note: explicitly URL encode __ ASP(.NET) parameters (e.g. 
to avoid problems with Base64 encoded '+' character) - standard procedure in web browsers conf.data = re.sub( r"\b(__\w+)=([^&]+)", lambda match: "%s=%s" % (match.group(1), urlencode(match.group(2), safe='%')), conf.data) conf.httpHeaders = [ conf.httpHeaders[i] for i in xrange(len(conf.httpHeaders)) if conf.httpHeaders[i][0].upper() not in ( __[0].upper() for __ in conf.httpHeaders[i + 1:]) ] initTargetEnv() parseTargetUrl() testSqlInj = False if PLACE.GET in conf.parameters and not any( (conf.data, conf.testParameter)): for parameter in re.findall( r"([^=]+)=([^%s]+%s?|\Z)" % (re.escape(conf.paramDel or "") or DEFAULT_GET_POST_DELIMITER, re.escape(conf.paramDel or "") or DEFAULT_GET_POST_DELIMITER), conf.parameters[PLACE.GET]): paramKey = (conf.hostname, conf.path, PLACE.GET, parameter[0]) if paramKey not in kb.testedParams: testSqlInj = True break else: paramKey = (conf.hostname, conf.path, None, None) if paramKey not in kb.testedParams: testSqlInj = True if testSqlInj and conf.hostname in kb.vulnHosts: if kb.skipVulnHost is None: message = "SQL injection vulnerability has already been detected " message += "against '%s'. Do you want to skip " % conf.hostname message += "further tests involving it? 
[Y/n]" kb.skipVulnHost = readInput(message, default='Y', boolean=True) testSqlInj = not kb.skipVulnHost if not testSqlInj: infoMsg = "skipping '%s'" % targetUrl logger.info(infoMsg) continue if conf.multipleTargets: if conf.forms and conf.method: message = "[%d/%s] Form:\n%s %s" % ( targetCount, len(kb.targets) if isListLike( kb.targets) else '?', conf.method, targetUrl) else: message = "[%d/%s] URL:\n%s %s" % ( targetCount, len(kb.targets) if isListLike( kb.targets) else '?', HTTPMETHOD.GET, targetUrl) if conf.cookie: message += "\nCookie: %s" % conf.cookie if conf.data is not None: message += "\n%s data: %s" % ( (conf.method if conf.method != HTTPMETHOD.GET else None) or HTTPMETHOD.POST, urlencode(conf.data or "") if re.search(r"\A\s*[<{]", conf.data or "") is None else conf.data) if conf.forms and conf.method: if conf.method == HTTPMETHOD.GET and targetUrl.find( "?") == -1: continue message += "\ndo you want to test this form? [Y/n/q] " choice = readInput(message, default='Y').upper() if choice == 'N': continue elif choice == 'Q': break else: if conf.method != HTTPMETHOD.GET: message = "Edit %s data [default: %s]%s: " % ( conf.method, urlencode(conf.data or "") if re.search(r"\A\s*[<{]", conf.data or "None") is None else conf.data, " (Warning: blank fields detected)" if conf.data and extractRegexResult( EMPTY_FORM_FIELDS_REGEX, conf.data) else "") conf.data = readInput(message, default=conf.data) conf.data = _randomFillBlankFields(conf.data) conf.data = urldecode( conf.data) if conf.data and urlencode( DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data else: if '?' in targetUrl: firstPart, secondPart = targetUrl.split('?', 1) message = "Edit GET data [default: %s]: " % secondPart test = readInput(message, default=secondPart) test = _randomFillBlankFields(test) conf.url = "%s?%s" % (firstPart, test) parseTargetUrl() else: if not conf.scope: message += "\ndo you want to test this URL? 
[Y/n/q]" choice = readInput(message, default='Y').upper() if choice == 'N': dataToStdout(os.linesep) continue elif choice == 'Q': break else: pass infoMsg = "testing URL '%s'" % targetUrl logger.info(infoMsg) setupTargetEnv() if not checkConnection(suppressOutput=conf.forms): continue if conf.rParam and kb.originalPage: kb.randomPool = dict([ _ for _ in kb.randomPool.items() if isinstance(_[1], list) ]) for match in re.finditer( r"(?si)<select[^>]+\bname\s*=\s*[\"']([^\"']+)(.+?)</select>", kb.originalPage): name, _ = match.groups() options = tuple( re.findall(r"<option[^>]+\bvalue\s*=\s*[\"']([^\"']+)", _)) if options: kb.randomPool[name] = options checkWaf() if conf.nullConnection: checkNullConnection() if (len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None) ) and (kb.injection.place is None or kb.injection.parameter is None): if not any((conf.string, conf.notString, conf.regexp )) and PAYLOAD.TECHNIQUE.BOOLEAN in conf.technique: # NOTE: this is not needed anymore, leaving only to display # a warning message to the user in case the page is not stable checkStability() # Do a little prioritization reorder of a testable parameter list parameters = list(conf.parameters.keys()) # Order of testing list (first to last) orderList = (PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER, PLACE.URI, PLACE.POST, PLACE.GET) for place in orderList[::-1]: if place in parameters: parameters.remove(place) parameters.insert(0, place) proceed = True for place in parameters: # Test User-Agent and Referer headers only if # --level >= 3 skip = (place == PLACE.USER_AGENT and (kb.testOnlyCustom or conf.level < 3)) skip |= (place == PLACE.REFERER and (kb.testOnlyCustom or conf.level < 3)) # --param-filter skip |= (len(conf.paramFilter) > 0 and place.upper() not in conf.paramFilter) # Test Host header only if # --level >= 5 skip |= (place == PLACE.HOST and (kb.testOnlyCustom or conf.level < 5)) # Test Cookie header only if --level >= 2 skip |= (place == PLACE.COOKIE 
and (kb.testOnlyCustom or conf.level < 2)) skip |= (place == PLACE.USER_AGENT and intersect( USER_AGENT_ALIASES, conf.skip, True) not in ([], None)) skip |= (place == PLACE.REFERER and intersect( REFERER_ALIASES, conf.skip, True) not in ([], None)) skip |= (place == PLACE.COOKIE and intersect( PLACE.COOKIE, conf.skip, True) not in ([], None)) skip |= (place == PLACE.HOST and intersect( PLACE.HOST, conf.skip, True) not in ([], None)) skip &= not (place == PLACE.USER_AGENT and intersect( USER_AGENT_ALIASES, conf.testParameter, True)) skip &= not (place == PLACE.REFERER and intersect( REFERER_ALIASES, conf.testParameter, True)) skip &= not (place == PLACE.HOST and intersect( HOST_ALIASES, conf.testParameter, True)) skip &= not (place == PLACE.COOKIE and intersect( (PLACE.COOKIE, ), conf.testParameter, True)) if skip: continue if place not in conf.paramDict: continue paramDict = conf.paramDict[place] paramType = conf.method if conf.method not in ( None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place for parameter, value in paramDict.items(): if not proceed: break kb.vainRun = False testSqlInj = True paramKey = (conf.hostname, conf.path, place, parameter) if paramKey in kb.testedParams: testSqlInj = False infoMsg = "skipping previously processed %sparameter '%s'" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) elif any(_ in conf.testParameter for _ in (parameter, removePostHintPrefix(parameter))): pass elif parameter in conf.rParam: testSqlInj = False infoMsg = "skipping randomizing %sparameter '%s'" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) elif parameter in conf.skip or kb.postHint and parameter.split( ' ')[-1] in conf.skip: testSqlInj = False infoMsg = "skipping %sparameter '%s'" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) elif conf.paramExclude and ( re.search(conf.paramExclude, parameter, re.I) or kb.postHint and 
re.search(conf.paramExclude, parameter.split(' ')[-1], re.I)): testSqlInj = False infoMsg = "skipping %sparameter '%s'" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) elif conf.csrfToken and re.search( conf.csrfToken, parameter, re.I): testSqlInj = False infoMsg = "skipping anti-CSRF token parameter '%s'" % parameter logger.info(infoMsg) # Ignore session-like parameters for --level < 4 elif conf.level < 4 and ( parameter.upper() in IGNORE_PARAMETERS or any(_ in parameter.lower() for _ in CSRF_TOKEN_PARAMETER_INFIXES) or parameter.upper().startswith( GOOGLE_ANALYTICS_COOKIE_PREFIX)): testSqlInj = False infoMsg = "ignoring %sparameter '%s'" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) elif PAYLOAD.TECHNIQUE.BOOLEAN in conf.technique or conf.skipStatic: check = checkDynParam(place, parameter, value) if not check: warnMsg = "%sparameter '%s' does not appear to be dynamic" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.warn(warnMsg) if conf.skipStatic: infoMsg = "skipping static %sparameter '%s'" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) testSqlInj = False else: infoMsg = "%sparameter '%s' appears to be dynamic" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) kb.testedParams.add(paramKey) if testSqlInj: try: if place == PLACE.COOKIE: pushValue(kb.mergeCookies) kb.mergeCookies = False check = heuristicCheckSqlInjection( place, parameter) if check != HEURISTIC_TEST.POSITIVE: if conf.smart or ( kb.ignoreCasted and check == HEURISTIC_TEST.CASTED): infoMsg = "skipping %sparameter '%s'" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) continue infoMsg = "testing for SQL injection on %sparameter '%s'" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.info(infoMsg) injection = checkSqlInjection( place, parameter, 
value) proceed = not kb.endDetection injectable = False if getattr(injection, "place", None) is not None: if NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE in injection.notes: kb.falsePositives.append(injection) else: injectable = True kb.injections.append(injection) # In case when user wants to end detection phase (Ctrl+C) if not proceed: break msg = "%sparameter '%s' " % ( "%s " % injection.place if injection.place != injection.parameter else "", injection.parameter) msg += "is vulnerable. Do you want to keep testing the others (if any)? [y/N] " if not readInput(msg, default='N', boolean=True): proceed = False paramKey = (conf.hostname, conf.path, None, None) kb.testedParams.add(paramKey) if not injectable: warnMsg = "%sparameter '%s' does not seem to be injectable" % ( "%s " % paramType if paramType != parameter else "", parameter) logger.warn(warnMsg) finally: if place == PLACE.COOKIE: kb.mergeCookies = popValue() if len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None): if kb.vainRun and not conf.multipleTargets: errMsg = "no parameter(s) found for testing in the provided data " errMsg += "(e.g. GET parameter 'id' in 'www.site.com/index.php?id=1')" if kb.originalPage: advice = [] if not conf.forms and re.search( r"<form", kb.originalPage) is not None: advice.append("--forms") if not conf.crawlDepth and re.search( r"href=[\"']/?\w", kb.originalPage) is not None: advice.append("--crawl=2") if advice: errMsg += ". You are advised to rerun with '%s'" % ' '.join( advice) raise SqlmapNoneDataException(errMsg) else: errMsg = "all tested parameters do not appear to be injectable." if conf.level < 5 or conf.risk < 3: errMsg += " Try to increase values for '--level'/'--risk' options " errMsg += "if you wish to perform more tests." if isinstance(conf.technique, list) and len(conf.technique) < 5: errMsg += " Rerun without providing the option '--technique'." 
if not conf.textOnly and kb.originalPage: percent = ( 100.0 * len(getFilteredPageContent(kb.originalPage)) / len(kb.originalPage)) if kb.dynamicMarkings: errMsg += " You can give it a go with the switch '--text-only' " errMsg += "if the target page has a low percentage " errMsg += "of textual content (~%.2f%% of " % percent errMsg += "page content is text)." elif percent < LOW_TEXT_PERCENT and not kb.errorIsNone: errMsg += " Please retry with the switch '--text-only' " errMsg += "(along with --technique=BU) as this case " errMsg += "looks like a perfect candidate " errMsg += "(low textual content along with inability " errMsg += "of comparison engine to detect at least " errMsg += "one dynamic parameter)." if kb.heuristicTest == HEURISTIC_TEST.POSITIVE: errMsg += " As heuristic test turned out positive you are " errMsg += "strongly advised to continue on with the tests." if conf.string: errMsg += " Also, you can try to rerun by providing a " errMsg += "valid value for option '--string' as perhaps the string you " errMsg += "have chosen does not match " errMsg += "exclusively True responses." elif conf.regexp: errMsg += " Also, you can try to rerun by providing a " errMsg += "valid value for option '--regexp' as perhaps the regular " errMsg += "expression that you have chosen " errMsg += "does not match exclusively True responses." if not conf.tamper: errMsg += " If you suspect that there is some kind of protection mechanism " errMsg += "involved (e.g. WAF) maybe you could try to use " errMsg += "option '--tamper' (e.g. '--tamper=space2comment')" if not conf.randomAgent: errMsg += " and/or switch '--random-agent'" raise SqlmapNotVulnerableException(errMsg.rstrip('.')) else: # Flush the flag kb.testMode = False _saveToResultsFile() _saveToHashDB() _showInjections() _selectInjection() if kb.injection.place is not None and kb.injection.parameter is not None: if conf.multipleTargets: message = "do you want to exploit this SQL injection? 
[Y/n] " condition = readInput(message, default='Y', boolean=True) else: condition = True if condition: action() except KeyboardInterrupt: if kb.lastCtrlCTime and (time.time() - kb.lastCtrlCTime < 1): kb.multipleCtrlC = True raise SqlmapUserQuitException( "user aborted (Ctrl+C was pressed multiple times)") kb.lastCtrlCTime = time.time() if conf.multipleTargets: warnMsg = "user aborted in multiple target mode" logger.warn(warnMsg) message = "do you want to skip to the next target in list? [Y/n/q]" choice = readInput(message, default='Y').upper() if choice == 'N': return False elif choice == 'Q': raise SqlmapUserQuitException else: raise except SqlmapSkipTargetException: pass except SqlmapUserQuitException: raise except SqlmapSilentQuitException: raise except SqlmapBaseException as ex: errMsg = getSafeExString(ex) if conf.multipleTargets: _saveToResultsFile() errMsg += ", skipping to the next target" logger.error(errMsg.lstrip(", ")) else: logger.critical(errMsg) return False finally: showHttpErrorCodes() if kb.maxConnectionsFlag: warnMsg = "it appears that the target " warnMsg += "has a maximum connections " warnMsg += "constraint" logger.warn(warnMsg) if kb.dataOutputFlag and not conf.multipleTargets: logger.info("fetched data logged to text files under '%s'" % conf.outputPath) if conf.multipleTargets: if conf.resultsFile: infoMsg = "you can find results of scanning in multiple targets " infoMsg += "mode inside the CSV file '%s'" % conf.resultsFile logger.info(infoMsg) return True
def getPage(**kwargs):
    """
    This method connects to the target URL or proxy and returns
    the target URL page content

    Recognized keyword arguments (all optional, falling back to conf/kb
    values): url, get, post, method, cookie, ua, referer, host, direct,
    multipart, silent, raise404, timeout, auxHeaders, response,
    ignoreTimeout, refreshing, retrying, crawling, skipRead.

    Return shape varies by branch (as visible below): the bare decoded
    page in the multipart branch, (conn, None, None) when response=True,
    (None, None, None) on several error paths, and raises
    SqlmapConnectionException/SqlmapSyntaxException on fatal conditions.
    NOTE(review): the plain success path here falls off the end of the
    function (implicit None) - confirm against the upstream version.
    """

    # Optional pacing between requests: fixed delay or CPU throttling
    if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
        time.sleep(conf.delay)
    elif conf.cpuThrottle:
        cpuThrottle(conf.cpuThrottle)

    # Dry-run mode: fabricate random page content instead of connecting
    if conf.dummy:
        return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())

    threadData = getCurrentThreadData()
    with kb.locks.request:
        # Request counter is shared across threads; guard the increment
        kb.requestCounter += 1
        threadData.lastRequestUID = kb.requestCounter

    url = kwargs.get("url", None) or conf.url
    get = kwargs.get("get", None)
    post = kwargs.get("post", None)
    method = kwargs.get("method", None)
    cookie = kwargs.get("cookie", None)
    ua = kwargs.get("ua", None) or conf.agent
    referer = kwargs.get("referer", None) or conf.referer
    host = kwargs.get("host", None) or conf.host
    direct_ = kwargs.get("direct", False)
    multipart = kwargs.get("multipart", False)
    silent = kwargs.get("silent", False)
    raise404 = kwargs.get("raise404", True)
    timeout = kwargs.get("timeout", None) or conf.timeout
    auxHeaders = kwargs.get("auxHeaders", None)
    response = kwargs.get("response", False)
    ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
    refreshing = kwargs.get("refreshing", False)
    retrying = kwargs.get("retrying", False)
    crawling = kwargs.get("crawling", False)
    skipRead = kwargs.get("skipRead", False)

    # Resolve relative URLs against the configured target
    if not urlparse.urlsplit(url).netloc:
        url = urlparse.urljoin(conf.url, url)

    # flag to know if we are dealing with the same target host
    target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

    if not retrying:
        # Reset the number of connection retries
        threadData.retriesCount = 0

    # fix for known issue when urllib2 just skips the other part of provided
    # url splitted with space char while urlencoding it in the later phase
    url = url.replace(" ", "%20")

    conn = None
    code = None
    page = None

    _ = urlparse.urlsplit(url)
    requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
    requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
    responseMsg = u"HTTP response "
    requestHeaders = u""
    responseHeaders = None
    logHeaders = u""
    skipLogTraffic = False

    raise404 = raise404 and not kb.ignoreNotFound

    # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
    # support those by default
    url = asciifyUrl(url)

    # fix for known issues when using url in unicode format
    # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
    url = unicodeencode(url)

    try:
        socket.setdefaulttimeout(timeout)

        if direct_:
            # Direct connection: re-encode any query string present in the URL
            if '?' in url:
                url, params = url.split('?', 1)
                params = urlencode(params)
                url = "%s?%s" % (url, params)
                requestMsg += "?%s" % params

        elif multipart:
            # Needed in this form because of potential circle dependency
            # problem (option -> update -> connect -> option)
            from lib.core.option import proxyHandler

            multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
            conn = multipartOpener.open(unicodeencode(url), multipart)
            page = Connect._connReadProxy(conn) if not skipRead else None
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

            # NOTE: multipart branch returns the page alone, not a triple
            return page

        elif any((refreshing, crawling)):
            pass

        elif target:
            # Same-host request: merge configured GET/POST parameters in
            if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
                url = re.sub("\Ahttp:", "https:", url, re.I)
                url = re.sub(":80/", ":443/", url, re.I)

            if PLACE.GET in conf.parameters and not get:
                get = conf.parameters[PLACE.GET]

                if not conf.skipUrlEncode:
                    get = urlencode(get, limit=True)

            if get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST):
                post = conf.parameters[PLACE.POST]

        elif get:
            url = "%s?%s" % (url, get)
            requestMsg += "?%s" % get

        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

        # Prepare HTTP headers
        headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer})

        if kb.authHeader:
            headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

        if kb.proxyAuthHeader:
            headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

        headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
        headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
        headers[HTTP_HEADER.HOST] = host or getHostHeader(url)

        if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
            headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

        if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
            warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
            warnMsg += "Will try to reconstruct"
            singleTimeWarnMessage(warnMsg)

            boundary = findMultipartPostBoundary(conf.data)
            if boundary:
                headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)

        if auxHeaders:
            for key, item in auxHeaders.items():
                headers[key] = item

        # Re-key all headers through page-encoding aware byte encoding.
        # Safe to mutate while iterating in Python 2 because items()
        # returns a list snapshot.
        for key, item in headers.items():
            del headers[key]
            headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

        post = unicodeencode(post, kb.pageEncoding)

        if method:
            req = MethodRequest(url, post, headers)
            req.set_method(method)
        else:
            req = urllib2.Request(url, post, headers)

        requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

        if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
            # Touch private cookiejar internals to render the Cookie header
            # for traffic logging only
            conf.cj._policy._now = conf.cj._now = int(time.time())
            cookies = conf.cj._cookies_for_request(req)
            requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

        if post is not None:
            if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

        if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
            requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

        requestMsg += "\n%s" % requestHeaders

        if post is not None:
            requestMsg += "\n\n%s" % getUnicode(post)

        requestMsg += "\n"

        threadData.lastRequestMsg = requestMsg

        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        conn = urllib2.urlopen(req)

        # Cache successful auth headers for subsequent requests
        if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.authType == AUTH_TYPE.BASIC:
            kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

        if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
            kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

        # Return response object
        if response:
            return conn, None, None

        # Get HTTP response
        if hasattr(conn, 'redurl'):
            # 'redurl' attribute marks a response that went through the
            # redirect handler
            page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
            skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
            code = conn.redcode
        else:
            page = Connect._connReadProxy(conn) if not skipRead else None

        code = code or conn.code
        responseHeaders = conn.info()
        responseHeaders[URI_HTTP_HEADER] = conn.geturl()
        page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
        status = getUnicode(conn.msg)

        # Handle <meta http-equiv="refresh"> style redirects (common on
        # login pages), optionally re-issuing the request
        if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
            url = extractRegexResult(META_REFRESH_REGEX, page)

            debugMsg = "got HTML meta refresh header"
            logger.debug(debugMsg)

            if kb.alwaysRefresh is None:
                msg = "sqlmap got a refresh request "
                msg += "(redirect like response common to login pages). "
                msg += "Do you want to apply the refresh "
                msg += "from now on (or stay on the original page)? [Y/n]"
                choice = readInput(msg, default="Y")

                kb.alwaysRefresh = choice not in ("n", "N")

            if kb.alwaysRefresh:
                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                kwargs['refreshing'] = True
                kwargs['get'] = None
                kwargs['post'] = None

                try:
                    return Connect._getPageProxy(**kwargs)
                except SqlmapSyntaxException:
                    pass

        # Explicit closing of connection object
        if not conf.keepAlive:
            try:
                if hasattr(conn.fp, '_sock'):
                    conn.fp._sock.close()
                conn.close()
            except Exception, msg:
                warnMsg = "problem occurred during connection closing ('%s')" % msg
                logger.warn(warnMsg)

    except urllib2.HTTPError, e:
        page = None
        responseHeaders = None

        try:
            # Error responses still carry a useful body/headers
            page = e.read() if not skipRead else None
            responseHeaders = e.info()
            responseHeaders[URI_HTTP_HEADER] = e.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
        except socket.timeout:
            warnMsg = "connection timed out while trying "
            warnMsg += "to get error page information (%d)" % e.code
            logger.warn(warnMsg)
            return None, None, None
        except KeyboardInterrupt:
            raise
        except:
            pass
        finally:
            page = page if isinstance(page, unicode) else getUnicode(page)

        code = e.code
        threadData.lastHTTPError = (threadData.lastRequestUID, code)

        # Per-status-code error counters (reported at shutdown)
        kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

        status = getUnicode(e.msg)
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

        skipLogTraffic = True

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

        if e.code == httplib.UNAUTHORIZED:
            errMsg = "not authorized, try to provide right HTTP "
            errMsg += "authentication type and valid credentials (%d)" % code
            raise SqlmapConnectionException(errMsg)
        elif e.code == httplib.NOT_FOUND:
            if raise404:
                errMsg = "page not found (%d)" % code
                raise SqlmapConnectionException(errMsg)
            else:
                debugMsg = "page not found (%d)" % code
                singleTimeLogMessage(debugMsg, logging.DEBUG)
                processResponse(page, responseHeaders)
        elif e.code == httplib.GATEWAY_TIMEOUT:
            if ignoreTimeout:
                return None, None, None
            else:
                warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code])
                if threadData.retriesCount < conf.retries and not kb.threadException:
                    warnMsg += ". sqlmap is going to retry the request"
                    logger.critical(warnMsg)
                    return Connect._retryProxy(**kwargs)
                elif kb.testMode:
                    logger.critical(warnMsg)
                    return None, None, None
                else:
                    raise SqlmapConnectionException(warnMsg)
        else:
            debugMsg = "got HTTP error code: %d (%s)" % (code, status)
            logger.debug(debugMsg)
def http_error_416(self, req, fp, code, msg, hdrs):
    """Abort when the server answers with HTTP 416 (Range Not
    Satisfiable) by raising SqlmapConnectionException."""
    raise SqlmapConnectionException("Invalid range")
class Connect(object):
    """
    This class defines methods used to perform HTTP requests
    """

    @staticmethod
    def _getPageProxy(**kwargs):
        # Thin indirection used by retry/refresh logic to re-enter getPage
        return Connect.getPage(**kwargs)

    @staticmethod
    def _retryProxy(**kwargs):
        """Retry the last request, rotating proxies and emitting one-time
        diagnostic warnings before re-entering getPage."""
        threadData = getCurrentThreadData()
        threadData.retriesCount += 1

        if conf.proxyList and threadData.retriesCount >= conf.retries:
            warnMsg = "changing proxy"
            logger.warn(warnMsg)

            # Round-robin rotation of the configured proxy list
            conf.proxy = conf.proxyList[0]
            conf.proxyList = conf.proxyList[1:] + conf.proxyList[:1]
            setHTTPProxy()

        if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
            # timed based payloads can cause web server unresponsiveness
            # if the injectable piece of code is some kind of JOIN-like query
            warnMsg = "most probably web server instance hasn't recovered yet "
            warnMsg += "from previous timed based payload. If the problem "
            warnMsg += "persists please wait for few minutes and rerun "
            warnMsg += "without flag T in option '--technique' "
            warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
            warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
            singleTimeWarnMessage(warnMsg)
        elif kb.originalPage is None:
            if conf.tor:
                warnMsg = "please make sure that you have "
                warnMsg += "Tor installed and running so "
                warnMsg += "you could successfully use "
                warnMsg += "switch '--tor' "
                if IS_WIN:
                    warnMsg += "(e.g. 'https://www.torproject.org/download/download.html.en')"
                else:
                    warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
            else:
                warnMsg = "if the problem persists please check that the provided "
                warnMsg += "target URL is valid. In case that it is, you can try to rerun "
                warnMsg += "with the switch '--random-agent' turned on "
                warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)"
            singleTimeWarnMessage(warnMsg)
        elif conf.threads > 1:
            warnMsg = "if the problem persists please try to lower "
            warnMsg += "the number of used threads (option '--threads')"
            singleTimeWarnMessage(warnMsg)

        # Give the target a moment to recover before retrying
        time.sleep(1)

        kwargs['retrying'] = True
        return Connect._getPageProxy(**kwargs)

    @staticmethod
    def _connReadProxy(conn):
        """Read the response body from *conn*, chunked and size-capped,
        trimming oversized injected-marker regions along the way."""
        retVal = ""

        if not kb.dnsMode and conn:
            headers = conn.info()
            # Compressed or non-text responses are read in one capped shot
            if headers and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
                retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
                if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
                    warnMsg = "large compressed response detected. Disabling compression"
                    singleTimeWarnMessage(warnMsg)
                    kb.pageCompress = False
            else:
                while True:
                    _ = conn.read(MAX_CONNECTION_CHUNK_SIZE)
                    if len(_) == MAX_CONNECTION_CHUNK_SIZE:
                        warnMsg = "large response detected. This could take a while"
                        singleTimeWarnMessage(warnMsg)
                        # Collapse content between injection markers to keep
                        # the buffer small
                        _ = re.sub(r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start), "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start), _)
                        retVal += _
                    else:
                        retVal += _
                        break

                    if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
                        warnMsg = "too large response detected. Automatically trimming it"
                        singleTimeWarnMessage(warnMsg)
                        break

        return retVal

    @staticmethod
    def getPage(**kwargs):
        """
        This method connects to the target URL or proxy and returns
        the target URL page content

        Recognized keyword arguments (all optional, falling back to
        conf/kb values): url, get, post, method, cookie, ua, referer,
        host, direct, multipart, silent, raise404, timeout, auxHeaders,
        response, ignoreTimeout, refreshing, retrying, crawling, skipRead.

        Return shape varies by branch (as visible below): the bare page
        in the multipart branch, (conn, None, None) when response=True,
        (None, None, None) on several error paths; raises Sqlmap*
        exceptions on fatal conditions.
        """

        # Optional pacing between requests: fixed delay or CPU throttling
        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        # Dry-run mode: fabricate random page content instead of connecting
        if conf.dummy:
            return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())

        threadData = getCurrentThreadData()
        with kb.locks.request:
            # Request counter is shared across threads; guard the increment
            kb.requestCounter += 1
            threadData.lastRequestUID = kb.requestCounter

        url = kwargs.get("url", None) or conf.url
        get = kwargs.get("get", None)
        post = kwargs.get("post", None)
        method = kwargs.get("method", None)
        cookie = kwargs.get("cookie", None)
        ua = kwargs.get("ua", None) or conf.agent
        referer = kwargs.get("referer", None) or conf.referer
        host = kwargs.get("host", None) or conf.host
        direct_ = kwargs.get("direct", False)
        multipart = kwargs.get("multipart", False)
        silent = kwargs.get("silent", False)
        raise404 = kwargs.get("raise404", True)
        timeout = kwargs.get("timeout", None) or conf.timeout
        auxHeaders = kwargs.get("auxHeaders", None)
        response = kwargs.get("response", False)
        ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
        refreshing = kwargs.get("refreshing", False)
        retrying = kwargs.get("retrying", False)
        crawling = kwargs.get("crawling", False)
        skipRead = kwargs.get("skipRead", False)

        # Resolve relative URLs against the configured target
        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

        # flag to know if we are dealing with the same target host
        target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

        if not retrying:
            # Reset the number of connection retries
            threadData.retriesCount = 0

        # fix for known issue when urllib2 just skips the other part of provided
        # url splitted with space char while urlencoding it in the later phase
        url = url.replace(" ", "%20")

        conn = None
        code = None
        page = None

        _ = urlparse.urlsplit(url)
        requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
        requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
        responseMsg = u"HTTP response "
        requestHeaders = u""
        responseHeaders = None
        logHeaders = u""
        skipLogTraffic = False

        raise404 = raise404 and not kb.ignoreNotFound

        # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
        # support those by default
        url = asciifyUrl(url)

        # fix for known issues when using url in unicode format
        # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
        url = unicodeencode(url)

        try:
            socket.setdefaulttimeout(timeout)

            if direct_:
                # Direct connection: re-encode any query string in the URL
                if '?' in url:
                    url, params = url.split('?', 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circle dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(unicodeencode(url), multipart)
                page = Connect._connReadProxy(conn) if not skipRead else None
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

                # NOTE: multipart branch returns the page alone, not a triple
                return page

            elif any((refreshing, crawling)):
                pass

            elif target:
                # Same-host request: merge configured GET/POST parameters in
                if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
                    url = re.sub("\Ahttp:", "https:", url, re.I)
                    url = re.sub(":80/", ":443/", url, re.I)

                if PLACE.GET in conf.parameters and not get:
                    get = conf.parameters[PLACE.GET]

                    if not conf.skipUrlEncode:
                        get = urlencode(get, limit=True)

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST):
                    post = conf.parameters[PLACE.POST]

            elif get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
            headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer})

            if kb.authHeader:
                headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
            headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
            headers[HTTP_HEADER.HOST] = host or getHostHeader(url)

            if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
                headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

            if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
                warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
                warnMsg += "Will try to reconstruct"
                singleTimeWarnMessage(warnMsg)

                boundary = findMultipartPostBoundary(conf.data)
                if boundary:
                    headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            # Re-key all headers through page-encoding aware byte encoding.
            # Safe to mutate while iterating in Python 2 because items()
            # returns a list snapshot.
            for key, item in headers.items():
                del headers[key]
                headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

            post = unicodeencode(post, kb.pageEncoding)

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

            if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                # Touch private cookiejar internals to render the Cookie
                # header for traffic logging only
                conf.cj._policy._now = conf.cj._now = int(time.time())
                cookies = conf.cj._cookies_for_request(req)
                requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

            if post is not None:
                if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                    requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

            if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post is not None:
                requestMsg += "\n\n%s" % getUnicode(post)

            requestMsg += "\n"

            threadData.lastRequestMsg = requestMsg

            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            conn = urllib2.urlopen(req)

            # Cache successful auth headers for subsequent requests
            if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.authType == AUTH_TYPE.BASIC:
                kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

            # Return response object
            if response:
                return conn, None, None

            # Get HTTP response
            if hasattr(conn, 'redurl'):
                # 'redurl' attribute marks a response that went through the
                # redirect handler
                page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
                skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                code = conn.redcode
            else:
                page = Connect._connReadProxy(conn) if not skipRead else None

            code = code or conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

            # Handle <meta http-equiv="refresh"> style redirects (common on
            # login pages), optionally re-issuing the request
            if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page)

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                if kb.alwaysRefresh is None:
                    msg = "sqlmap got a refresh request "
                    msg += "(redirect like response common to login pages). "
                    msg += "Do you want to apply the refresh "
                    msg += "from now on (or stay on the original page)? [Y/n]"
                    choice = readInput(msg, default="Y")

                    kb.alwaysRefresh = choice not in ("n", "N")

                if kb.alwaysRefresh:
                    if url.lower().startswith('http://'):
                        kwargs['url'] = url
                    else:
                        kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                    threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                    kwargs['refreshing'] = True
                    kwargs['get'] = None
                    kwargs['post'] = None

                    try:
                        return Connect._getPageProxy(**kwargs)
                    except SqlmapSyntaxException:
                        pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occurred during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                # Error responses still carry a useful body/headers
                page = e.read() if not skipRead else None
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None, None
            except KeyboardInterrupt:
                raise
            except:
                pass
            finally:
                page = page if isinstance(page, unicode) else getUnicode(page)

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            # Per-status-code error counters (reported at shutdown)
            kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

            skipLogTraffic = True

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

            if e.code == httplib.UNAUTHORIZED:
                errMsg = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise SqlmapConnectionException(errMsg)
            elif e.code == httplib.NOT_FOUND:
                if raise404:
                    errMsg = "page not found (%d)" % code
                    raise SqlmapConnectionException(errMsg)
                else:
                    debugMsg = "page not found (%d)" % code
                    singleTimeLogMessage(debugMsg, logging.DEBUG)
                    processResponse(page, responseHeaders)
            elif e.code == httplib.GATEWAY_TIMEOUT:
                if ignoreTimeout:
                    return None, None, None
                else:
                    warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code])
                    if threadData.retriesCount < conf.retries and not kb.threadException:
                        warnMsg += ". sqlmap is going to retry the request"
                        logger.critical(warnMsg)
                        return Connect._retryProxy(**kwargs)
                    elif kb.testMode:
                        logger.critical(warnMsg)
                        return None, None, None
                    else:
                        raise SqlmapConnectionException(warnMsg)
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)

        except (urllib2.URLError, socket.error, socket.timeout, httplib.BadStatusLine, httplib.IncompleteRead, ProxyError, SqlmapCompressionException), e:
            # Classify low-level connection failures by inspecting the
            # formatted traceback text
            tbMsg = traceback.format_exc()

            if "no host given" in tbMsg:
                warnMsg = "invalid URL address used (%s)" % repr(url)
                raise SqlmapSyntaxException(warnMsg)
            elif "forcibly closed" in tbMsg:
                warnMsg = "connection was forcibly closed by the target URL"
            elif "timed out" in tbMsg:
                warnMsg = "connection timed out to the target URL"
            elif "URLError" in tbMsg or "error" in tbMsg:
                warnMsg = "unable to connect to the target URL"
            elif "BadStatusLine" in tbMsg:
                warnMsg = "connection dropped or unknown HTTP "
                warnMsg += "status code received. Try to force the HTTP User-Agent "
                warnMsg += "header with option '--user-agent' or switch '--random-agent'"
            elif "IncompleteRead" in tbMsg:
                warnMsg = "there was an incomplete read error while retrieving data "
                warnMsg += "from the target URL"
            else:
                warnMsg = "unable to connect to the target URL"

            if "BadStatusLine" not in tbMsg:
                warnMsg += " or proxy"

            if silent:
                return None, None, None
            elif "forcibly closed" in tbMsg:
                logger.critical(warnMsg)
                return None, None, None
            elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")):
                return None, None, None
            elif threadData.retriesCount < conf.retries and not kb.threadException:
                warnMsg += ". sqlmap is going to retry the request"
                logger.critical(warnMsg)
                return Connect._retryProxy(**kwargs)
            elif kb.testMode:
                logger.critical(warnMsg)
                return None, None, None
            else:
                raise SqlmapConnectionException(warnMsg)
def _infinite_loop_check(self, req):
    """
    Raise SqlmapConnectionException when the redirect bookkeeping kept on
    *req* indicates a redirect loop: either the current URL has already
    been redirected to MAX_SINGLE_URL_REDIRECTIONS times, or the total
    number of distinct redirect URLs reached MAX_TOTAL_REDIRECTIONS.
    """
    # req.redirect_dict (set by the redirect handler) maps visited
    # redirect URLs to their hit counts
    if hasattr(req, 'redirect_dict') and (req.redirect_dict.get(req.get_full_url(), 0) >= MAX_SINGLE_URL_REDIRECTIONS or len(req.redirect_dict) >= MAX_TOTAL_REDIRECTIONS):
        # Iterating the dict directly yields its keys; the original
        # ", ".join(item for item in req.redirect_dict.keys()) was a
        # redundant generator over keys()
        errMsg = "infinite redirect loop detected (%s). " % ", ".join(req.redirect_dict)
        errMsg += "Please check all provided parameters and/or provide missing ones"
        raise SqlmapConnectionException(errMsg)
def _search(dork):
    """
    This method performs the effective search on Google providing
    the google dork and the Google session cookie

    Falls back to DuckDuckGo or Bing (user's choice) when Google yields
    no usable links. Returns a list of unquoted result URLs, or None
    when *dork* is empty or an error page could not be retrieved.
    """

    if not dork:
        return None

    data = None
    headers = {}

    headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
    headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE

    try:
        # Initial request against /ncr ("no country redirect") so results
        # come from google.com regardless of locale
        req = _urllib.request.Request("https://www.google.com/ncr", headers=headers)
        conn = _urllib.request.urlopen(req)
    except Exception as ex:
        errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
        raise SqlmapConnectionException(errMsg)

    gpage = conf.googlePage if conf.googlePage > 1 else 1
    logger.info("using search result page #%d" % gpage)

    url = "https://www.google.com/search?"
    url += "q=%s&" % urlencode(dork, convall=True)
    url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
    url += "&start=%d" % ((gpage - 1) * 100)

    try:
        req = _urllib.request.Request(url, headers=headers)
        conn = _urllib.request.urlopen(req)

        requestMsg = "HTTP request:\nGET %s" % url
        requestMsg += " %s" % _http_client.HTTPException._http_vsn_str
        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        page = conn.read()
        code = conn.code
        status = conn.msg
        responseHeaders = conn.info()

        page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

        responseMsg = "HTTP response (%s - %d):\n" % (status, code)

        if conf.verbose <= 4:
            responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
        elif conf.verbose > 4:
            responseMsg += "%s\n%s\n" % (responseHeaders, page)

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
    except _urllib.error.HTTPError as ex:
        try:
            # Error pages (e.g. CAPTCHA) are still parsed below
            page = ex.read()
        except Exception as _:
            warnMsg = "problem occurred while trying to get "
            warnMsg += "an error page information (%s)" % getSafeExString(_)
            logger.critical(warnMsg)
            return None
    except (_urllib.error.URLError, _http_client.error, socket.error, socket.timeout, socks.ProxyError):
        errMsg = "unable to connect to Google"
        raise SqlmapConnectionException(errMsg)

    retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]

    if not retVal and "detected unusual traffic" in page:
        warnMsg = "Google has detected 'unusual' traffic from "
        warnMsg += "used IP address disabling further searches"

        if conf.proxyList:
            raise SqlmapBaseException(warnMsg)
        else:
            logger.critical(warnMsg)

    if not retVal:
        message = "no usable links found. What do you want to do?"
        message += "\n[1] (re)try with DuckDuckGo (default)"
        message += "\n[2] (re)try with Bing"
        message += "\n[3] quit"
        choice = readInput(message, default='1')

        if choice == '3':
            raise SqlmapUserQuitException
        elif choice == '2':
            url = "https://www.bing.com/search?q=%s&first=%d" % (urlencode(dork, convall=True), (gpage - 1) * 10 + 1)
            regex = BING_REGEX
        else:
            url = "https://duckduckgo.com/html/"
            data = "q=%s&s=%d" % (urlencode(dork, convall=True), (gpage - 1) * 30)
            regex = DUCKDUCKGO_REGEX

        try:
            req = _urllib.request.Request(url, data=data, headers=headers)
            conn = _urllib.request.urlopen(req)

            requestMsg = "HTTP request:\nGET %s" % url
            requestMsg += " %s" % _http_client.HTTPException._http_vsn_str
            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()

            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)

            if conf.verbose <= 4:
                responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
        except _urllib.error.HTTPError as ex:
            try:
                page = ex.read()
                page = decodePage(page, ex.headers.get("Content-Encoding"), ex.headers.get("Content-Type"))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % ex.code
                logger.critical(warnMsg)
                return None
        except:
            errMsg = "unable to connect"
            raise SqlmapConnectionException(errMsg)

        # FIX: the scraped hrefs are HTML-escaped, so '&amp;' must be
        # unescaped to '&' before unquoting; the previous code used the
        # no-op .replace("&", "&")
        retVal = [_urllib.parse.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]

        if not retVal and "issue with the Tor Exit Node you are currently using" in page:
            warnMsg = "DuckDuckGo has detected 'unusual' traffic from "
            warnMsg += "used (Tor) IP address"

            if conf.proxyList:
                raise SqlmapBaseException(warnMsg)
            else:
                logger.critical(warnMsg)

    return retVal
def http_error_416(self, req, fp, code, msg, hdrs):
    """
    Handler for HTTP 416 responses: abort with SqlmapConnectionException.
    """
    # FIX: 416 is "Range Not Satisfiable" (RFC 7233); the previous
    # message mislabeled it as 406 (which is "Not Acceptable")
    errMsg = "there was a problem while connecting "
    errMsg += "target ('416 - Range Not Satisfiable')"
    raise SqlmapConnectionException(errMsg)