Example #1
    def _process_http_redirect(self, result, headers, code, content, msg,
                               redurl):
        content = decodePage(content, headers.get(HTTPHEADER.CONTENT_ENCODING),
                             headers.get(HTTPHEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        responseMsg = "HTTP response "
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code,
                                             getUnicode(msg))

        if headers:
            logHeaders = "\n".join(
                "%s: %s" %
                (key.capitalize() if isinstance(key, basestring) else key,
                 getUnicode(value)) for (key, value) in headers.items())
        else:
            logHeaders = ""

        logHTTPTraffic(threadData.lastRequestMsg,
                       "%s%s" % (responseMsg, logHeaders))

        responseMsg += getUnicode(logHeaders)

        logger.log(7, responseMsg)

        if "set-cookie" in headers:
            kb.redirectSetCookie = headers["set-cookie"].split("; path")[0]

        result.redcode = code
        result.redurl = redurl

        return result
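
For context, a minimal sketch of how a method like this might be wired into a urllib2 redirect handler, assuming _process_http_redirect lives on the handler class itself; the class name and the http_error_302 wiring below are illustrative, not taken from this source:

    import urllib2

    class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
        def http_error_302(self, req, fp, code, msg, headers):
            # Read the redirect body first, let the stock handler follow the
            # redirect, then decorate the resulting response object with
            # redcode/redurl via the method above (hypothetical wiring).
            content = fp.read()
            redurl = headers.get("location")
            result = urllib2.HTTPRedirectHandler.http_error_302(
                self, req, fp, code, msg, headers)
            return self._process_http_redirect(result, headers, code,
                                               content, msg, redurl)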
Example #2
    def common_http_redirect(self, result, headers, code, content, msg):
        content = decodePage(content, headers.get(HTTPHEADER.CONTENT_ENCODING), headers.get(HTTPHEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        responseMsg = "HTTP response "
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, getUnicode(msg))

        if headers:
            logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in headers.items()])
        else:
            logHeaders = ""

        logHTTPTraffic(threadData.lastRequestMsg, "%s%s" % (responseMsg, logHeaders))

        responseMsg += getUnicode(logHeaders)

        logger.log(7, responseMsg)

        if result:
            if self._get_header_redirect(headers):
                result.redurl = self._get_header_redirect(headers)

            if hasattr(result, 'redurl'):
                if not urlparse.urlsplit(result.redurl).netloc:
                    result.redurl = urlparse.urljoin(conf.url, result.redurl)

            if "set-cookie" in headers:
                result.setcookie = headers["set-cookie"].split("; path")[0]

            result.redcode = code

        return result
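
The urljoin fallback above covers servers that send a relative Location value; the stdlib behaviour it relies on (Python 2):

    import urlparse

    print urlparse.urlsplit("/index.php").netloc    # '' -> relative, triggers the urljoin branch
    print urlparse.urljoin("http://example.com/app/", "/index.php")
    # http://example.com/index.php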
Example #3
    def common_http_redirect(self, result, headers, code, content, msg):
        content = decodePage(content, headers.get(HTTPHEADER.CONTENT_ENCODING), headers.get(HTTPHEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        responseMsg = "HTTP response "
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, getUnicode(msg))

        if headers:
            logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in headers.items()])
        else:
            logHeaders = ""

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s\n" % (logHeaders, content)

        logger.log(7, responseMsg)

        if "location" in headers:
            result.redurl = headers.getheaders("location")[0].split("?")[0]
        elif "uri" in headers:
            result.redurl = headers.getheaders("uri")[0].split("?")[0]

        if "set-cookie" in headers:
            result.setcookie = headers["set-cookie"].split("; path")[0]

        result.redcode = code

        return result
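
Two details worth noting: in Python 2, headers.getheaders(name) returns every value of a possibly repeated header as a list (hence the [0]), and splitting on "; path" keeps only the cookie's name=value pair. A quick illustration of the trimming:

    # Trimming a Set-Cookie value down to its name=value pair
    raw = "PHPSESSID=abc123; path=/; HttpOnly"
    print raw.split("; path")[0]    # PHPSESSID=abc123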
Example #4
    def search(self, dork):
        """
        This method performs the effective search on Google providing
        the google dork and the Google session cookie
        """

        ### --gpage num @@@ use google dork results from specified page num
        gpage = conf.googlePage if conf.googlePage > 1 else 1
        logger.info("using Google result page #%d" % gpage)

        if not dork:
            return None

        #url = "http://www.google.com/search?"
        url = "http://cn.bing.com/search?"
        url += "q=%s&" % urlencode(dork, convall=True)
        url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
        url += "&start=%d" % ((gpage - 1) * 100)

        try:
            conn = self.opener.open(url)

            requestMsg = "HTTP request:\nGET %s" % url
            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get("Content-Encoding"),
                              responseHeaders.get("Content-Type"))
            print page

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)

            if conf.verbose <= 4:
                responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
        except urllib2.HTTPError, e:
            try:
                page = e.read()
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.critical(warnMsg)
                return None
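
The start parameter implements paging: with 100 results per page, page N begins at offset (N - 1) * 100. A rough sketch of the URL being built, using the stdlib urllib.quote_plus as a stand-in for the project's custom urlencode(dork, convall=True):

    import urllib

    dork = 'inurl:"id=" intext:error'
    gpage = 2
    url = "http://cn.bing.com/search?q=%s&num=100&start=%d" % (
        urllib.quote_plus(dork), (gpage - 1) * 100)
    print url    # ...&start=100 -> second page of results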
Example #5
    def search(self, googleDork):
        """
        This method performs the effective search on Google providing
        the google dork and the Google session cookie
        """

        gpage = conf.googlePage if conf.googlePage > 1 else 1
        logger.info("using Google result page #%d" % gpage)

        if not googleDork:
            return None

        url  = "http://www.google.com/search?"
        url += "q=%s&" % urlencode(googleDork)
        url += "num=100&hl=en&safe=off&filter=0&btnG=Search"
        url += "&start=%d" % ((gpage-1) * 100)

        try:
            conn = self.opener.open(url)

            requestMsg = "HTTP request:\nGET %s" % url
            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
            logger.log(8, requestMsg)

            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)

            if conf.verbose <= 4:
                responseMsg += getUnicode(responseHeaders)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)

            logger.log(7, responseMsg)
        except urllib2.HTTPError, e:
            try:
                page = e.read()
            except socket.timeout:
                warnMsg  = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.critical(warnMsg)
                return None
Example #6
    def search(self, googleDork):
        """
        This method performs the effective search on Google providing
        the google dork and the Google session cookie
        """

        gpage = conf.googlePage if conf.googlePage > 1 else 1
        logger.info("using Google result page #%d" % gpage)
        
        if not googleDork:
            return None

        url  = "http://www.google.com/search?"
        url += "q=%s&" % urlencode(googleDork)
        url += "num=100&hl=en&safe=off&filter=0&btnG=Search"
        url += "&start=%d" % ((gpage-1) * 100)

        try:
            conn = self.opener.open(url)

            requestMsg = "HTTP request:\nGET %s HTTP/1.1" % url
            #requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in conn.headers.items()])
            #requestMsg += "\n%s" % requestHeaders
            requestMsg += "\n"
            logger.log(9, requestMsg)

            page            = conn.read()
            code            = conn.code
            status          = conn.msg
            responseHeaders = conn.info()

            encoding = responseHeaders.get("Content-Encoding")
            page = decodePage(page, encoding)

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)
    
            if conf.verbose <= 4:
                responseMsg += str(responseHeaders)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)
    
            logger.log(8, responseMsg)
        except urllib2.HTTPError, e:
            page = e.read()
Example #7
    def getPage(**kwargs):
        """
        This method connects to the target URL or proxy and returns
        the target URL page content
        """

        if isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        if conf.dummy:
            return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())

        threadData = getCurrentThreadData()
        with kb.locks.request:
            kb.requestCounter += 1
            threadData.lastRequestUID = kb.requestCounter

        url = kwargs.get("url",                     None) or conf.url
        get = kwargs.get("get",                     None)
        post = kwargs.get("post",                   None)
        method = kwargs.get("method",               None)
        cookie = kwargs.get("cookie",               None)
        ua = kwargs.get("ua",                       None) or conf.agent
        referer = kwargs.get("referer",             None) or conf.referer
        host = kwargs.get("host",                   None) or conf.host
        direct_ = kwargs.get("direct",              False)
        multipart = kwargs.get("multipart",         False)
        silent = kwargs.get("silent",               False)
        raise404 = kwargs.get("raise404",           True)
        timeout = kwargs.get("timeout",             None) or conf.timeout
        auxHeaders = kwargs.get("auxHeaders",       None)
        response = kwargs.get("response",           False)
        ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
        refreshing = kwargs.get("refreshing",       False)
        retrying = kwargs.get("retrying",           False)
        crawling = kwargs.get("crawling",           False)
        skipRead = kwargs.get("skipRead",           False)

        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

        # flag to know if we are dealing with the same target host
        target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

        if not retrying:
            # Reset the number of connection retries
            threadData.retriesCount = 0

        # fix for known issue when urllib2 just skips the other part of provided
        # url split with space char while urlencoding it in the later phase
        url = url.replace(" ", "%20")

        conn = None
        code = None
        page = None

        _ = urlparse.urlsplit(url)
        requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
        requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
        responseMsg = u"HTTP response "
        requestHeaders = u""
        responseHeaders = None
        logHeaders = u""
        skipLogTraffic = False

        raise404 = raise404 and not kb.ignoreNotFound

        # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
        # support those by default
        url = asciifyUrl(url)

        try:
            socket.setdefaulttimeout(timeout)

            if direct_:
                if '?' in url:
                    url, params = url.split('?', 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)

            elif multipart:
                # Needed in this form because of potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(unicodeencode(url), multipart)
                page = Connect._connReadProxy(conn) if not skipRead else None
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

                return page

            elif any((refreshing, crawling)):
                pass

            elif target:
                if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
                    url = re.sub("\Ahttp:", "https:", url, re.I)
                    url = re.sub(":80/", ":443/", url, re.I)

                if PLACE.GET in conf.parameters and not get:
                    get = conf.parameters[PLACE.GET]

                    if not conf.skipUrlEncode:
                        get = urlencode(get, limit=True)

                if get:
                    if '?' in url:
                        url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
                        requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
                    else:
                        url = "%s?%s" % (url, get)
                        requestMsg += "?%s" % get

                if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
                    post = conf.parameters[PLACE.POST]

            elif get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
            headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host})

            if kb.authHeader:
                headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            if HTTP_HEADER.ACCEPT not in headers:
                headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

            if HTTP_HEADER.HOST not in headers:
                headers[HTTP_HEADER.HOST] = getHostHeader(url)

            if HTTP_HEADER.ACCEPT_ENCODING not in headers:
                headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"

            if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
                headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

            if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
                warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
                warnMsg += "Will try to reconstruct"
                singleTimeWarnMessage(warnMsg)

                boundary = findMultipartPostBoundary(conf.data)
                if boundary:
                    headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)

            if auxHeaders:
                for key, item in auxHeaders.items():
                    for _ in headers.keys():
                        if _.upper() == key.upper():
                            del headers[_]
                    headers[key] = item

            for key, item in headers.items():
                del headers[key]
                headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

            url = unicodeencode(url)
            post = unicodeencode(post, kb.pageEncoding)

            if method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
                method = unicodeencode(method)
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())

            if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                conf.cj._policy._now = conf.cj._now = int(time.time())
                cookies = conf.cj._cookies_for_request(req)
                requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

            if post is not None:
                if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                    requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

            if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post is not None:
                requestMsg += "\n\n%s" % getUnicode(post)

            requestMsg += "\n"

            threadData.lastRequestMsg = requestMsg

            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
                kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

            # Return response object
            if response:
                return conn, None, None

            # Get HTTP response
            if hasattr(conn, 'redurl'):
                page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
                  else Connect._connReadProxy(conn)) if not skipRead else None
                skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                code = conn.redcode
            else:
                page = Connect._connReadProxy(conn) if not skipRead else None

            code = code or conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

            if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page)

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                if kb.alwaysRefresh is None:
                    msg = "sqlmap got a refresh request "
                    msg += "(redirect like response common to login pages). "
                    msg += "Do you want to apply the refresh "
                    msg += "from now on (or stay on the original page)? [Y/n]"
                    choice = readInput(msg, default="Y")

                    kb.alwaysRefresh = choice not in ("n", "N")

                if kb.alwaysRefresh:
                    if url.lower().startswith('http://'):
                        kwargs['url'] = url
                    else:
                        kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                    threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                    kwargs['refreshing'] = True
                    kwargs['get'] = None
                    kwargs['post'] = None

                    try:
                        return Connect._getPageProxy(**kwargs)
                    except SqlmapSyntaxException:
                        pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occurred during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read() if not skipRead else None
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None, None
            except KeyboardInterrupt:
                raise
            except:
                pass
            finally:
                page = page if isinstance(page, unicode) else getUnicode(page)

            code = e.code

            kb.originalCode = kb.originalCode or code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)
            kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

            skipLogTraffic = True

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

            if e.code == httplib.UNAUTHORIZED and not conf.ignore401:
                errMsg = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise SqlmapConnectionException(errMsg)
            elif e.code == httplib.NOT_FOUND:
                if raise404:
                    errMsg = "page not found (%d)" % code
                    raise SqlmapConnectionException(errMsg)
                else:
                    debugMsg = "page not found (%d)" % code
                    singleTimeLogMessage(debugMsg, logging.DEBUG)
                    processResponse(page, responseHeaders)
            elif e.code == httplib.GATEWAY_TIMEOUT:
                if ignoreTimeout:
                    return None, None, None
                else:
                    warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code])
                    if threadData.retriesCount < conf.retries and not kb.threadException:
                        warnMsg += ". sqlmap is going to retry the request"
                        logger.critical(warnMsg)
                        return Connect._retryProxy(**kwargs)
                    elif kb.testMode:
                        logger.critical(warnMsg)
                        return None, None, None
                    else:
                        raise SqlmapConnectionException(warnMsg)
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
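
A hedged call-site sketch for getPage(): the keyword names mirror the kwargs.get(...) defaults at the top of the function, and the three-item return shape is inferred from the error paths (return None, None, None); the target URL is made up:

    # Hypothetical call site; on success the function is expected to return
    # a (page, responseHeaders, code) triple.
    page, headers, code = Connect.getPage(url="http://target.example/index.php",
                                          get="id=1",
                                          raise404=False,
                                          ignoreTimeout=True)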
Example #8
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            delay = 0.00001 * (conf.cpuThrottle ** 2)
            time.sleep(delay)

        kb.locks.reqLock.acquire()

        kb.lastRequestUID += 1
        requestID = kb.lastRequestUID

        kb.locks.reqLock.release()

        url             = kwargs.get('url',        conf.url).replace(" ", "%20")
        get             = kwargs.get('get',        None)
        post            = kwargs.get('post',       None)
        method          = kwargs.get('method',     None)
        cookie          = kwargs.get('cookie',     None)
        ua              = kwargs.get('ua',         None)
        direct          = kwargs.get('direct',     False)
        multipart       = kwargs.get('multipart',  False)
        silent          = kwargs.get('silent',     False)
        raise404        = kwargs.get('raise404',   True)
        auxHeaders      = kwargs.get('auxHeaders', None)
        response        = kwargs.get('response',   False)

        page            = ""
        cookieStr       = ""
        requestMsg      = "HTTP request [#%d]:\n%s " % (requestID, conf.method)
        requestMsg     += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg     = "HTTP response "
        requestHeaders  = ""
        responseHeaders = ""
        logHeaders      = ""

        try:
            if silent:
                socket.setdefaulttimeout(3)

            if direct:
                if "?" in url:
                    url, params = url.split("?", 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()
                responseHeaders = conn.info()
                page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

                return page

            else:
                if conf.parameters.has_key(PLACE.GET) and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST:
                    if conf.parameters.has_key(PLACE.POST) and not post:
                        post = conf.parameters[PLACE.POST]

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua)

            if kb.authHeader:
                headers["Authorization"] = kb.authHeader

            if kb.proxyAuthHeader:
                headers["Proxy-authorization"] = kb.proxyAuthHeader

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            if not conf.dropSetCookie and conf.cj:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    cookie = getUnicode(cookie)
                    index  = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]

            conn = urllib2.urlopen(req)

            if not req.has_header("Accept-Encoding"):
                requestHeaders += "Accept-Encoding: identity\n"

            requestHeaders += "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

            if not req.has_header("Cookie") and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header("Connection"):
                requestHeaders += "\nConnection: close"

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n%s" % post

            requestMsg += "\n"

            logger.log(8, requestMsg)

            if not kb.authHeader and req.has_header("Authorization"):
                kb.authHeader = req.get_header("Authorization")

            if not kb.proxyAuthHeader and req.has_header("Proxy-authorization"):
                kb.proxyAuthHeader = req.get_header("Proxy-authorization")

            if hasattr(conn, "redurl") and hasattr(conn, "redcode") and not conf.redirectHandled:
                msg  = "sqlmap got a %d redirect to " % conn.redcode
                msg += "%s - What target address do you " % conn.redurl
                msg += "want to use from now on? %s " % conf.url
                msg += "(default) or provide another target address based "
                msg += "also on the redirection got from the application\n"

                while True:
                    choice = readInput(msg, default="1")

                    if not choice or choice == "1":
                        pass
                    else:
                        conf.url = choice
                        return Connect.__getPageProxy(**kwargs)

                    break

                conf.redirectHandled = True

            # Reset the number of connection retries
            conf.retriesCount = 0

            # Return response object
            if response:
                return conn, None

            # Get HTTP response
            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

        except urllib2.HTTPError, e:
            code = e.code
            status = e.msg

            try:
                page = e.read()
                responseHeaders = e.info()
            except socket.timeout:
                warnMsg  = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % code
                logger.warn(warnMsg)
                return None, None
            except:
                pass

            responseMsg = "\n%s[#%d] (%d %s):\n" % (responseMsg, requestID, code, status)

            if responseHeaders:
                logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, value) for (key, value) in responseHeaders.items()])

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page))

            if e.code == 401:
                errMsg  = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == 404 and raise404:
                errMsg = "page not found (%d)" % code
                raise sqlmapConnectionException, errMsg
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
Example #9
            self.opener.addheaders = [_ for _ in self.opener.addheaders if _[0].lower() != HTTP_HEADER.ACCEPT_ENCODING.lower()]
            self.opener.addheaders.append((HTTP_HEADER.ACCEPT_ENCODING, HTTP_ACCEPT_ENCODING_HEADER_VALUE))

            try:
                conn = self.opener.open(url)

                requestMsg = "HTTP request:\nGET %s" % url
                requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
                logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

                page = conn.read()
                code = conn.code
                status = conn.msg
                responseHeaders = conn.info()
                page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

                responseMsg = "HTTP response (%s - %d):\n" % (status, code)

                if conf.verbose <= 4:
                    responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
                elif conf.verbose > 4:
                    responseMsg += "%s\n%s\n" % (responseHeaders, page)

                logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
            except urllib2.HTTPError, e:
                try:
                    page = e.read()
                except socket.timeout:
                    warnMsg = "connection timed out while trying "
                    warnMsg += "to get error page information (%d)" % e.code
Example #10
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(
                conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        threadData = getCurrentThreadData()
        threadData.lastRequestUID += 1

        url = kwargs.get('url', conf.url)
        get = kwargs.get('get', None)
        post = kwargs.get('post', None)
        method = kwargs.get('method', None)
        cookie = kwargs.get('cookie', None)
        ua = kwargs.get('ua', None)
        referer = kwargs.get('referer', None)
        host = kwargs.get('host', conf.host)
        direct = kwargs.get('direct', False)
        multipart = kwargs.get('multipart', False)
        silent = kwargs.get('silent', False)
        raise404 = kwargs.get('raise404', True)
        auxHeaders = kwargs.get('auxHeaders', None)
        response = kwargs.get('response', False)
        ignoreTimeout = kwargs.get('ignoreTimeout', kb.ignoreTimeout)
        refreshing = kwargs.get('refreshing', False)
        retrying = kwargs.get('retrying', False)
        crawling = kwargs.get('crawling', False)

        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

        # flag to know if we are dealing with the same target host
        target = reduce(
            lambda x, y: x == y,
            map(lambda x: urlparse.urlparse(x).netloc.split(':')[0],
                [url, conf.url or ""]))

        if not retrying:
            # Reset the number of connection retries
            threadData.retriesCount = 0

        # fix for known issue when urllib2 just skips the other part of provided
        # url split with space char while urlencoding it in the later phase
        url = url.replace(" ", "%20")

        code = None
        page = None
        requestMsg = u"HTTP request [#%d]:\n%s " % (
            threadData.lastRequestUID, method or
            (HTTPMETHOD.POST if post else HTTPMETHOD.GET))
        requestMsg += ("%s" % urlparse.urlsplit(url)[2] or "/") if not any(
            (refreshing, crawling)) else url
        responseMsg = u"HTTP response "
        requestHeaders = u""
        responseHeaders = None
        logHeaders = u""
        skipLogTraffic = False

        raise404 = raise404 and not kb.ignoreNotFound

        # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
        # support those by default
        url = asciifyUrl(url)

        # fix for known issues when using url in unicode format
        # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
        url = unicodeencode(url)

        try:
            if silent:
                socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)
            else:
                socket.setdefaulttimeout(conf.timeout)

            if direct:
                if "?" in url:
                    url, params = url.split("?", 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(
                    proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(unicodeencode(url), multipart)
                page = Connect.__connReadProxy(conn)
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(
                    page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING),
                    responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

                return page

            elif any((refreshing, crawling)):
                pass

            elif target:
                if PLACE.GET in conf.parameters and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST and not post:
                    for place in (PLACE.POST, PLACE.SOAP):
                        if place in conf.parameters:
                            post = conf.parameters[place]
                            break

            elif get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
            headers = forgeHeaders({
                HTTPHEADER.COOKIE: cookie,
                HTTPHEADER.USER_AGENT: ua,
                HTTPHEADER.REFERER: referer
            })

            if kb.authHeader:
                headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
            headers[HTTPHEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if method != HTTPMETHOD.HEAD else "identity"
            headers[HTTPHEADER.HOST] = host or getHostHeader(url)

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            for key, item in headers.items():
                del headers[key]
                headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(
                    item, kb.pageEncoding)

            post = unicodeencode(post, kb.pageEncoding)

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            requestHeaders += "\n".join(
                "%s: %s" %
                (key.capitalize() if isinstance(key, basestring) else key,
                 getUnicode(value)) for (key, value) in req.header_items())

            if not getRequestHeader(req, HTTPHEADER.COOKIE) and conf.cj:
                conf.cj._policy._now = conf.cj._now = int(time.time())
                cookies = conf.cj._cookies_for_request(req)
                requestHeaders += "\n%s" % ("Cookie: %s" % ";".join(
                    "%s=%s" %
                    (getUnicode(cookie.name), getUnicode(cookie.value))
                    for cookie in cookies))

            if post:
                if not getRequestHeader(req, HTTPHEADER.CONTENT_TYPE):
                    requestHeaders += "\n%s: %s" % (
                        string.capwords(HTTPHEADER.CONTENT_TYPE),
                        "application/x-www-form-urlencoded")

                if not getRequestHeader(req, HTTPHEADER.CONTENT_LENGTH):
                    requestHeaders += "\n%s: %d" % (string.capwords(
                        HTTPHEADER.CONTENT_LENGTH), len(post))

            if not getRequestHeader(req, HTTPHEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n\n%s" % getUnicode(post)

            requestMsg += "\n"

            threadData.lastRequestMsg = requestMsg

            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and getRequestHeader(
                    req, HTTPHEADER.AUTHORIZATION):
                kb.authHeader = getRequestHeader(req, HTTPHEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and getRequestHeader(
                    req, HTTPHEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = getRequestHeader(
                    req, HTTPHEADER.PROXY_AUTHORIZATION)

            # Return response object
            if response:
                return conn, None, None

            # Get HTTP response
            if hasattr(conn, 'redurl'):
                page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
                  else Connect.__connReadProxy(conn)
                skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                code = conn.redcode
            else:
                page = Connect.__connReadProxy(conn)

            code = code or conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page,
                              responseHeaders.get(HTTPHEADER.CONTENT_ENCODING),
                              responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

            if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL
                                  | re.IGNORECASE) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page,
                                         re.DOTALL | re.IGNORECASE)

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                if kb.alwaysRefresh is None:
                    msg = "sqlmap got a refresh request "
                    msg += "(redirect like response common to login pages). "
                    msg += "Do you want to apply the refresh "
                    msg += "from now on (or stay on the original page)? [Y/n]"
                    choice = readInput(msg, default="Y")

                    kb.alwaysRefresh = choice not in ("n", "N")

                if kb.alwaysRefresh:
                    if url.lower().startswith('http://'):
                        kwargs['url'] = url
                    else:
                        kwargs['url'] = conf.url[:conf.url.rfind('/') +
                                                 1] + url

                    threadData.lastRedirectMsg = (threadData.lastRequestUID,
                                                  page)
                    kwargs['refreshing'] = True
                    kwargs['get'] = None
                    kwargs['post'] = None

                    try:
                        return Connect.__getPageProxy(**kwargs)
                    except sqlmapSyntaxException:
                        pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occured during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read()
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(
                    page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING),
                    responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None, None
            except KeyboardInterrupt:
                raise
            except:
                pass
            finally:
                page = page if isinstance(page, unicode) else getUnicode(page)

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID,
                                                 code, status)

            if responseHeaders:
                logHeaders = "\n".join(
                    "%s: %s" %
                    (key.capitalize() if isinstance(key, basestring) else key,
                     getUnicode(value))
                    for (key, value) in responseHeaders.items())

            logHTTPTraffic(
                requestMsg,
                "%s%s\n\n%s" % (responseMsg, logHeaders,
                                (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

            skipLogTraffic = True

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s" % (logHeaders,
                                             (page or
                                              "")[:MAX_CONNECTION_CHUNK_SIZE])

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

            if e.code == httplib.UNAUTHORIZED:
                errMsg = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == httplib.NOT_FOUND:
                if raise404:
                    errMsg = "page not found (%d)" % code
                    raise sqlmapConnectionException, errMsg
                else:
                    debugMsg = "page not found (%d)" % code
                    logger.debug(debugMsg)
                    processResponse(page, responseHeaders)
            elif e.code == httplib.GATEWAY_TIMEOUT:
                if ignoreTimeout:
                    return None, None, None
                else:
                    warnMsg = "unable to connect to the target url (%d - %s)" % (
                        e.code, httplib.responses[e.code])
                    if threadData.retriesCount < conf.retries and not kb.threadException:
                        warnMsg += ", sqlmap is going to retry the request"
                        logger.critical(warnMsg)
                        return Connect.__retryProxy(**kwargs)
                    elif kb.testMode:
                        logger.critical(warnMsg)
                        return None, None, None
                    else:
                        raise sqlmapConnectionException, warnMsg
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
Example #11
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        threadData = getCurrentThreadData()
        threadData.lastRequestUID += 1

        url = kwargs.get('url',                     conf.url)
        get = kwargs.get('get',                     None)
        post = kwargs.get('post',                   None)
        method = kwargs.get('method',               None)
        cookie = kwargs.get('cookie',               None)
        ua = kwargs.get('ua',                       None)
        referer = kwargs.get('referer',             None)
        host = kwargs.get('host',                   conf.host)
        direct = kwargs.get('direct',               False)
        multipart = kwargs.get('multipart',         False)
        silent = kwargs.get('silent',               False)
        raise404 = kwargs.get('raise404',           True)
        auxHeaders = kwargs.get('auxHeaders',       None)
        response = kwargs.get('response',           False)
        ignoreTimeout = kwargs.get('ignoreTimeout', kb.ignoreTimeout)
        refreshing = kwargs.get('refreshing',       False)
        retrying = kwargs.get('retrying',           False)
        crawling = kwargs.get('crawling',           False)

        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

        # flag to know if we are dealing with the same target host
        target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

        if not retrying:
            # Reset the number of connection retries
            threadData.retriesCount = 0

        # fix for known issue when urllib2 just skips the other part of provided
        # url split with space char while urlencoding it in the later phase
        url = url.replace(" ", "%20")

        code = None
        page = None
        requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post else HTTPMETHOD.GET))
        requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg = u"HTTP response "
        requestHeaders = u""
        responseHeaders = None
        logHeaders = u""
        skipLogTraffic = False

        raise404 = raise404 and not kb.ignoreNotFound

        # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
        # support those by default
        url = asciifyUrl(url)

        # fix for known issues when using url in unicode format
        # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
        url = unicodeencode(url)

        try:
            if silent:
                socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)
            else:
                socket.setdefaulttimeout(conf.timeout)

            if direct:
                if "?" in url:
                    url, params = url.split("?", 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(unicodeencode(url), multipart)
                page = Connect.__connReadProxy(conn)
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

                return page

            elif any((refreshing, crawling)):
                pass

            elif target:
                if conf.parameters.has_key(PLACE.GET) and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST and not post:
                    for place in (PLACE.POST, PLACE.SOAP):
                        if conf.parameters.has_key(place):
                            post = conf.parameters[place]
                            break

            elif get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
            headers = forgeHeaders({ HTTPHEADER.COOKIE: cookie, HTTPHEADER.USER_AGENT: ua, HTTPHEADER.REFERER: referer })

            if conf.realTest:
                headers[HTTPHEADER.REFERER] = "%s://%s" % (conf.scheme, conf.hostname)

            if kb.authHeader:
                headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

            headers[HTTPHEADER.HOST] = host or getHostHeader(url)

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            for key, item in headers.items():
                del headers[key]
                headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

            post = unicodeencode(post, kb.pageEncoding)

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            if not req.has_header(HTTPHEADER.ACCEPT_ENCODING):
                requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING

            requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

            if not req.has_header(HTTPHEADER.COOKIE) and conf.cj:
                conf.cj._policy._now = conf.cj._now = int(time.time())
                cookies = conf.cj._cookies_for_request(req)
                requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

            if not req.has_header(HTTPHEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n\n%s" % getUnicode(post)

            requestMsg += "\n"

            threadData.lastRequestMsg = requestMsg

            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and req.has_header(HTTPHEADER.AUTHORIZATION):
                kb.authHeader = req.get_header(HTTPHEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and req.has_header(HTTPHEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = req.get_header(HTTPHEADER.PROXY_AUTHORIZATION)

            # Return response object
            if response:
                return conn, None, None

            # Get HTTP response
            if hasattr(conn, 'redurl'):
                page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
                  else Connect.__connReadProxy(conn)
                skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                code = conn.redcode
            else:
                page = Connect.__connReadProxy(conn)

            code = code or conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

            if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                if kb.alwaysRefresh is None:
                    msg = "sqlmap got a refresh request "
                    msg += "(redirect like response common to login pages). "
                    msg += "Do you want to apply the refresh "
                    msg += "from now on (or stay on the original page)? [Y/n]"
                    choice = readInput(msg, default="Y")

                    kb.alwaysRefresh = choice not in ("n", "N")

                if kb.alwaysRefresh:
                    if url.lower().startswith('http://'):
                        kwargs['url'] = url
                    else:
                        kwargs['url'] = conf.url[:conf.url.rfind('/')+1] + url

                    threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                    kwargs['refreshing'] = True
                    kwargs['get'] = None
                    kwargs['post'] = None

                    try:
                        return Connect.__getPageProxy(**kwargs)
                    except sqlmapSyntaxException:
                        pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occured during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read()
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None, None
            except KeyboardInterrupt:
                raise
            except:
                pass
            finally:
                page = page if isinstance(page, unicode) else getUnicode(page)

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items())

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page))

            skipLogTraffic = True

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s\n" % (logHeaders, page)

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

            if e.code == httplib.UNAUTHORIZED:
                errMsg = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == httplib.NOT_FOUND:
                if raise404:
                    errMsg = "page not found (%d)" % code
                    raise sqlmapConnectionException, errMsg
                else:
                    debugMsg = "page not found (%d)" % code
                    logger.debug(debugMsg)
                    processResponse(page, responseHeaders)
            elif e.code == httplib.GATEWAY_TIMEOUT:
                if ignoreTimeout:
                    return None, None, None
                else:
                    warnMsg = "unable to connect to the target url (%d - %s)" % (e.code, httplib.responses[e.code])
                    if threadData.retriesCount < conf.retries and not kb.threadException and not conf.realTest:
                        warnMsg += ", sqlmap is going to retry the request"
                        logger.critical(warnMsg)
                        return Connect.__retryProxy(**kwargs)
                    elif kb.testMode:
                        logger.critical(warnMsg)
                        return None, None, None
                    else:
                        raise sqlmapConnectionException, warnMsg
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
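A note on the except-branch above: urllib2's HTTPError is itself a response object, which is why the code can call e.read(), e.info() and e.geturl() on the exception. A minimal standalone sketch of the same idea (Python 3 naming, hypothetical URL):

import urllib.error
import urllib.request

def fetch(url):
    # HTTPError doubles as a response: on a 4xx/5xx we can still read
    # the error page body and headers, just like the snippet above does
    try:
        conn = urllib.request.urlopen(url)
        return conn.getcode(), dict(conn.info().items()), conn.read()
    except urllib.error.HTTPError as ex:
        return ex.code, dict(ex.headers.items()), ex.read()

# code, headers, body = fetch("http://example.com/missing")  # hypothetical URL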
Example #15
    url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
    url += "&start=%d" % ((gpage - 1) * 100)

    try:
        req = urllib2.Request(url, headers=headers)
        conn = urllib2.urlopen(req)

        requestMsg = "HTTP request:\nGET %s" % url
        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        page = conn.read()
        code = conn.code
        status = conn.msg
        responseHeaders = conn.info()
        page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

        responseMsg = "HTTP response (%s - %d):\n" % (status, code)

        if conf.verbose <= 4:
            responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
        elif conf.verbose > 4:
            responseMsg += "%s\n%s\n" % (responseHeaders, page)

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
    except urllib2.HTTPError, e:
        try:
            page = e.read()
        except Exception, ex:
            warnMsg = "problem occurred while trying to get "
            warnMsg += "an error page information (%s)" % getSafeExString(ex)
Example #16
        try:
            content = fp.read(MAX_CONNECTION_TOTAL_SIZE)
        except Exception, msg:
            dbgMsg = "there was a problem while retrieving "
            dbgMsg += "redirect response content (%s)" % msg
            logger.debug(dbgMsg)
        finally:
            if content:
                try:  # try to write it back to the read buffer so we could reuse it in further steps
                    fp.fp._rbuf.truncate(0)
                    fp.fp._rbuf.write(content)
                except:
                    pass

        content = decodePage(content, headers.get(HTTP_HEADER.CONTENT_ENCODING), headers.get(HTTP_HEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        redirectMsg = "HTTP redirect "
        redirectMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, getUnicode(msg))

        if headers:
            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in headers.items())
        else:
            logHeaders = ""

        redirectMsg += logHeaders
        if content:
            redirectMsg += "\n\n%s" % getUnicode(content[:MAX_CONNECTION_CHUNK_SIZE])
Example #17
        try:
            content = fp.read(MAX_CONNECTION_TOTAL_SIZE)
        except Exception, msg:
            dbgMsg = "there was a problem while retrieving "
            dbgMsg += "redirect response content (%s)" % msg
            logger.debug(dbgMsg)
        finally:
            if content:
                try:  # try to write it back to the read buffer so we could reuse it in further steps
                    fp.fp._rbuf.truncate(0)
                    fp.fp._rbuf.write(content)
                except:
                    pass

        content = decodePage(content,
                             headers.get(HTTP_HEADER.CONTENT_ENCODING),
                             headers.get(HTTP_HEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        redirectMsg = "HTTP redirect "
        redirectMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code,
                                             getUnicode(msg))

        if headers:
            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize(
            ) if isinstance(key, basestring) else key), getUnicode(value))
                                   for (key, value) in headers.items())
        else:
            logHeaders = ""
Example #18
def _search(dork):
    """
    This method performs the effective search on Google providing
    the google dork and the Google session cookie
    """

    if not dork:
        return None

    data = None
    headers = {}

    headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
    headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE

    try:
        req = _urllib.request.Request("https://www.google.com/ncr", headers=headers)
        conn = _urllib.request.urlopen(req)
    except Exception as ex:
        errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
        raise SqlmapConnectionException(errMsg)

    gpage = conf.googlePage if conf.googlePage > 1 else 1
    logger.info("using search result page #%d" % gpage)

    url = "https://www.google.com/search?"
    url += "q=%s&" % urlencode(dork, convall=True)
    url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
    url += "&start=%d" % ((gpage - 1) * 100)

    try:
        req = _urllib.request.Request(url, headers=headers)
        conn = _urllib.request.urlopen(req)

        requestMsg = "HTTP request:\nGET %s" % url
        requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        page = conn.read()
        code = conn.code
        status = conn.msg
        responseHeaders = conn.info()
        page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

        responseMsg = "HTTP response (%s - %d):\n" % (status, code)

        if conf.verbose <= 4:
            responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
        elif conf.verbose > 4:
            responseMsg += "%s\n%s\n" % (responseHeaders, page)

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
    except _urllib.error.HTTPError as ex:
        try:
            page = ex.read()
        except Exception as _:
            warnMsg = "problem occurred while trying to get "
            warnMsg += "an error page information (%s)" % getSafeExString(_)
            logger.critical(warnMsg)
            return None
    except (_urllib.error.URLError, _http_client.error, socket.error, socket.timeout, socks.ProxyError):
        errMsg = "unable to connect to Google"
        raise SqlmapConnectionException(errMsg)

    retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]

    if not retVal and "detected unusual traffic" in page:
        warnMsg = "Google has detected 'unusual' traffic from "
        warnMsg += "used IP address disabling further searches"

        if conf.proxyList:
            raise SqlmapBaseException(warnMsg)
        else:
            logger.critical(warnMsg)

    if not retVal:
        message = "no usable links found. What do you want to do?"
        message += "\n[1] (re)try with DuckDuckGo (default)"
        message += "\n[2] (re)try with Bing"
        message += "\n[3] quit"
        choice = readInput(message, default='1')

        if choice == '3':
            raise SqlmapUserQuitException
        elif choice == '2':
            url = "https://www.bing.com/search?q=%s&first=%d" % (urlencode(dork, convall=True), (gpage - 1) * 10 + 1)
            regex = BING_REGEX
        else:
            url = "https://duckduckgo.com/html/"
            data = "q=%s&s=%d" % (urlencode(dork, convall=True), (gpage - 1) * 30)
            regex = DUCKDUCKGO_REGEX

        try:
            req = _urllib.request.Request(url, data=data, headers=headers)
            conn = _urllib.request.urlopen(req)

            requestMsg = "HTTP request:\nGET %s" % url
            requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)

            if conf.verbose <= 4:
                responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
        except _urllib.error.HTTPError as ex:
            try:
                page = ex.read()
                page = decodePage(page, ex.headers.get("Content-Encoding"), ex.headers.get("Content-Type"))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % ex.code
                logger.critical(warnMsg)
                return None
        except:
            errMsg = "unable to connect"
            raise SqlmapConnectionException(errMsg)

        retVal = [_urllib.parse.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]

        if not retVal and "issue with the Tor Exit Node you are currently using" in page:
            warnMsg = "DuckDuckGo has detected 'unusual' traffic from "
            warnMsg += "used (Tor) IP address"

            if conf.proxyList:
                raise SqlmapBaseException(warnMsg)
            else:
                logger.critical(warnMsg)

    return retVal
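GOOGLE_REGEX, BING_REGEX and DUCKDUCKGO_REGEX are sqlmap-internal patterns; the extraction step itself is just re.finditer over the raw HTML plus percent-decoding. A toy version with a deliberately simplified, hypothetical pattern (the real ones are more involved):

import re
import urllib.parse

# hypothetical, simplified pattern standing in for GOOGLE_REGEX
RESULT_REGEX = r'href="(https?://[^"]+)"'

def extract_links(page):
    # pull candidate URLs out of the HTML and undo percent-encoding
    return [urllib.parse.unquote(m.group(1)) for m in re.finditer(RESULT_REGEX, page, re.I)]

# extract_links('<a href="http://example.com/?id=1">x</a>') -> ['http://example.com/?id=1']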
Example #19
    def getPage(**kwargs):
        """
        This method connects to the target URL or proxy and returns
        the target URL page content
        """

        if conf.delay is not None and isinstance(
                conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        if conf.dummy:
            return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())

        threadData = getCurrentThreadData()
        with kb.locks.request:
            kb.requestCounter += 1
            threadData.lastRequestUID = kb.requestCounter

        url = kwargs.get("url", None) or conf.url
        get = kwargs.get("get", None)
        post = kwargs.get("post", None)
        method = kwargs.get("method", None)
        cookie = kwargs.get("cookie", None)
        ua = kwargs.get("ua", None) or conf.agent
        referer = kwargs.get("referer", None) or conf.referer
        host = kwargs.get("host", None) or conf.host
        direct_ = kwargs.get("direct", False)
        multipart = kwargs.get("multipart", False)
        silent = kwargs.get("silent", False)
        raise404 = kwargs.get("raise404", True)
        timeout = kwargs.get("timeout", None) or conf.timeout
        auxHeaders = kwargs.get("auxHeaders", None)
        response = kwargs.get("response", False)
        ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
        refreshing = kwargs.get("refreshing", False)
        retrying = kwargs.get("retrying", False)
        crawling = kwargs.get("crawling", False)
        skipRead = kwargs.get("skipRead", False)

        if not urlparse.urlsplit(url).netloc:
            url = urlparse.urljoin(conf.url, url)

        # flag to know if we are dealing with the same target host
        target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

        if not retrying:
            # Reset the number of connection retries
            threadData.retriesCount = 0

        # fix for known issue when urllib2 just skips the other part of provided
        # url split with space char while urlencoding it in the later phase
        url = url.replace(" ", "%20")

        conn = None
        code = None
        page = None

        _ = urlparse.urlsplit(url)
        requestMsg = u"HTTP request [#%d]:\n%s " % (
            threadData.lastRequestUID, method or
            (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
        requestMsg += ("%s%s" %
                       (_.path or "/",
                        ("?%s" % _.query) if _.query else "")) if not any(
                            (refreshing, crawling)) else url
        responseMsg = u"HTTP response "
        requestHeaders = u""
        responseHeaders = None
        logHeaders = u""
        skipLogTraffic = False

        raise404 = raise404 and not kb.ignoreNotFound

        # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
        # support those by default
        url = asciifyUrl(url)

        # fix for known issues when using url in unicode format
        # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
        url = unicodeencode(url)

        try:
            socket.setdefaulttimeout(timeout)

            if direct_:
                if '?' in url:
                    url, params = url.split('?', 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(
                    proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(unicodeencode(url), multipart)
                page = Connect._connReadProxy(conn) if not skipRead else None
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(
                    page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING),
                    responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

                return page

            elif any((refreshing, crawling)):
                pass

            elif target:
                if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
                    url = re.sub(r"\Ahttp:", "https:", url, flags=re.I)
                    url = re.sub(r":80/", ":443/", url, flags=re.I)

                if PLACE.GET in conf.parameters and not get:
                    get = conf.parameters[PLACE.GET]

                    if not conf.skipUrlEncode:
                        get = urlencode(get, limit=True)

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST):
                    post = conf.parameters[PLACE.POST]

            elif get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Prepare HTTP headers
            headers = forgeHeaders({
                HTTP_HEADER.COOKIE: cookie,
                HTTP_HEADER.USER_AGENT: ua,
                HTTP_HEADER.REFERER: referer
            })

            if kb.authHeader:
                headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
            headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
            headers[HTTP_HEADER.HOST] = host or getHostHeader(url)

            if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
                headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

            if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
                warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
                warnMsg += "Will try to reconstruct"
                singleTimeWarnMessage(warnMsg)

                boundary = findMultipartPostBoundary(conf.data)
                if boundary:
                    headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (
                        headers[HTTP_HEADER.CONTENT_TYPE], boundary)

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            for key, item in headers.items():
                del headers[key]
                headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

            post = unicodeencode(post, kb.pageEncoding)

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            requestHeaders += "\n".join(
                "%s: %s" %
                (key.capitalize() if isinstance(key, basestring) else key,
                 getUnicode(value)) for (key, value) in req.header_items())

            if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                conf.cj._policy._now = conf.cj._now = int(time.time())
                cookies = conf.cj._cookies_for_request(req)
                requestHeaders += "\n%s" % ("Cookie: %s" % ";".join(
                    "%s=%s" %
                    (getUnicode(cookie.name), getUnicode(cookie.value))
                    for cookie in cookies))

            if post is not None:
                if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                    requestHeaders += "\n%s: %d" % (string.capwords(
                        HTTP_HEADER.CONTENT_LENGTH), len(post))

            if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post is not None:
                requestMsg += "\n\n%s" % getUnicode(post)

            requestMsg += "\n"

            threadData.lastRequestMsg = requestMsg

            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.authType == AUTH_TYPE.BASIC:
                kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

            # Return response object
            if response:
                return conn, None, None

            # Get HTTP response
            if hasattr(conn, 'redurl'):
                page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
                  else Connect._connReadProxy(conn)) if not skipRead else None
                skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
                code = conn.redcode
            else:
                page = Connect._connReadProxy(conn) if not skipRead else None

            code = code or conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(
                page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING),
                responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

            if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page)

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                if kb.alwaysRefresh is None:
                    msg = "sqlmap got a refresh request "
                    msg += "(redirect like response common to login pages). "
                    msg += "Do you want to apply the refresh "
                    msg += "from now on (or stay on the original page)? [Y/n]"
                    choice = readInput(msg, default="Y")

                    kb.alwaysRefresh = choice not in ("n", "N")

                if kb.alwaysRefresh:
                    if url.lower().startswith('http://'):
                        kwargs['url'] = url
                    else:
                        kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                    threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                    kwargs['refreshing'] = True
                    kwargs['get'] = None
                    kwargs['post'] = None

                    try:
                        return Connect._getPageProxy(**kwargs)
                    except SqlmapSyntaxException:
                        pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    if hasattr(conn.fp, '_sock'):
                        conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occurred during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read() if not skipRead else None
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(
                    page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING),
                    responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None, None
            except KeyboardInterrupt:
                raise
            except:
                pass
            finally:
                page = page if isinstance(page, unicode) else getUnicode(page)

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID,
                                                 code, status)

            if responseHeaders:
                logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize(
                ) if isinstance(key, basestring) else key), getUnicode(value))
                                       for (key,
                                            value) in responseHeaders.items())

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

            skipLogTraffic = True

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s" % (logHeaders,
                                             (page or
                                              "")[:MAX_CONNECTION_CHUNK_SIZE])

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

            if e.code == httplib.UNAUTHORIZED:
                errMsg = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise SqlmapConnectionException(errMsg)
            elif e.code == httplib.NOT_FOUND:
                if raise404:
                    errMsg = "page not found (%d)" % code
                    raise SqlmapConnectionException(errMsg)
                else:
                    debugMsg = "page not found (%d)" % code
                    singleTimeLogMessage(debugMsg, logging.DEBUG)
                    processResponse(page, responseHeaders)
            elif e.code == httplib.GATEWAY_TIMEOUT:
                if ignoreTimeout:
                    return None, None, None
                else:
                    warnMsg = "unable to connect to the target URL (%d - %s)" % (
                        e.code, httplib.responses[e.code])
                    if threadData.retriesCount < conf.retries and not kb.threadException:
                        warnMsg += ". sqlmap is going to retry the request"
                        logger.critical(warnMsg)
                        return Connect._retryProxy(**kwargs)
                    elif kb.testMode:
                        logger.critical(warnMsg)
                        return None, None, None
                    else:
                        raise SqlmapConnectionException(warnMsg)
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
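MethodRequest, used above, is sqlmap's way around urllib2 guessing the HTTP method from the presence of a body. A minimal Python 3 sketch of the same idea (note that Python 3's Request already accepts a method= argument directly, so the subclass is only needed for the set_method-style API):

import urllib.request

class MethodRequest(urllib.request.Request):
    # urllib guesses GET or POST from the presence of a body;
    # this override lets the caller force PUT, DELETE, HEAD, ...
    def set_method(self, method):
        self.method = method.upper()

    def get_method(self):
        return getattr(self, "method", None) or urllib.request.Request.get_method(self)

# req = MethodRequest("http://example.com/resource", data=b"x=1")  # hypothetical URL
# req.set_method("PUT")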
Example #20
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(
                conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        threadData = getCurrentThreadData()
        threadData.lastRequestUID += 1

        # dirty hack because urllib2 just skips the other part of provided url
        # split with space char while urlencoding it in the later phase
        url = kwargs.get('url', conf.url).replace(" ", "%20")

        get = kwargs.get('get', None)
        post = kwargs.get('post', None)
        method = kwargs.get('method', None)
        cookie = kwargs.get('cookie', None)
        ua = kwargs.get('ua', None)
        referer = kwargs.get('referer', None)
        direct = kwargs.get('direct', False)
        multipart = kwargs.get('multipart', False)
        silent = kwargs.get('silent', False)
        raise404 = kwargs.get('raise404', True)
        auxHeaders = kwargs.get('auxHeaders', None)
        response = kwargs.get('response', False)
        ignoreTimeout = kwargs.get('ignoreTimeout', False)
        refreshing = kwargs.get('refreshing', False)

        page = ""
        cookieStr = ""
        requestMsg = "HTTP request [#%d]:\n%s " % (threadData.lastRequestUID,
                                                   conf.method)
        requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg = "HTTP response "
        requestHeaders = ""
        responseHeaders = None
        logHeaders = ""

        try:
            if silent:
                socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)

            if direct:
                if "?" in url:
                    url, params = url.split("?")
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(
                    proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(
                    page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING),
                    responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

                return page

            elif refreshing:
                # Reference(s):
                # http://vancouver-webpages.com/META/metatags.detail.html
                # http://webdesign.about.com/od/metataglibraries/a/aa080300a.htm
                get = None
                post = None

            else:
                if conf.parameters.has_key(PLACE.GET) and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST:
                    if conf.parameters.has_key(PLACE.POST) and not post:
                        post = conf.parameters[PLACE.POST]

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua, referer)

            if conf.realTest:
                headers[HTTPHEADER.REFERER] = "%s://%s" % (conf.scheme,
                                                           conf.hostname)

            if kb.authHeader:
                headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            for key, item in headers.items():
                del headers[key]
                headers[encodeUnicode(key, kb.pageEncoding)] = encodeUnicode(
                    item, kb.pageEncoding)

            post = encodeUnicode(post, kb.pageEncoding)

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            if not conf.dropSetCookie and conf.cj:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    cookie = getUnicode(cookie)
                    index = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]

            if not req.has_header(HTTPHEADER.ACCEPT_ENCODING):
                requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING

            requestHeaders += "\n".join([
                "%s: %s" % (header, value)
                for header, value in req.header_items()
            ])

            if not req.has_header(HTTPHEADER.COOKIE) and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header(HTTPHEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n\n%s" % post

            requestMsg += "\n"

            logger.log(8, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and req.has_header(HTTPHEADER.AUTHORIZATION):
                kb.authHeader = req.get_header(HTTPHEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and req.has_header(
                    HTTPHEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = req.get_header(
                    HTTPHEADER.PROXY_AUTHORIZATION)

            if hasattr(conn, "setcookie"):
                kb.redirectSetCookie = conn.setcookie

            if hasattr(conn, "redurl") and hasattr(
                    conn, "redcode"
            ) and not conf.redirectHandled and not conf.realTest:
                msg = "sqlmap got a %d redirect to " % conn.redcode
                msg += "%s - What target address do you " % conn.redurl
                msg += "want to use from now on? %s " % conf.url
                msg += "(default) or provide another target address based "
                msg += "also on the redirection got from the application\n"

                while True:
                    choice = readInput(msg, default=None)

                    if not choice:
                        pass
                    else:
                        conf.url = choice
                        try:
                            parseTargetUrl()
                            return Connect.__getPageProxy(**kwargs)
                        except sqlmapSyntaxException:
                            continue

                    break

                conf.redirectHandled = True

            # Reset the number of connection retries
            kb.retriesCount = 0

            # Return response object
            if response:
                return conn, None

            # Get HTTP response
            page = conn.read()
            code = conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page,
                              responseHeaders.get(HTTPHEADER.CONTENT_ENCODING),
                              responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

            if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing:
                url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                kwargs['refreshing'] = True

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                try:
                    return Connect.__getPageProxy(**kwargs)
                except sqlmapSyntaxException:
                    pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occured during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read()
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(
                    page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING),
                    responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None
            except:
                pass

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            if code not in kb.httpErrorCodes:
                kb.httpErrorCodes[code] = 0
            kb.httpErrorCodes[code] += 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID,
                                                 code, status)

            if responseHeaders:
                logHeaders = "\n".join([
                    "%s: %s" %
                    (key.capitalize() if isinstance(key, basestring) else key,
                     getUnicode(value))
                    for (key, value) in responseHeaders.items()
                ])

            logHTTPTraffic(
                requestMsg, "%s%s\n\n%s" %
                (responseMsg, logHeaders,
                 page if isinstance(page, unicode) else getUnicode(page)))

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s\n" % (logHeaders, page)

            logger.log(7, responseMsg)

            if e.code == 401:
                errMsg = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == 404 and raise404:
                errMsg = "page not found (%d)" % code
                raise sqlmapConnectionException, errMsg
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
                page = processResponse(page, responseHeaders)
                return page, responseHeaders
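META_REFRESH_REGEX is sqlmap-internal; the mechanism is extracting the url= field from a <meta http-equiv="refresh"> tag so the refresh can be followed manually. A hypothetical, simplified equivalent:

import re

# hypothetical simplification of sqlmap's META_REFRESH_REGEX
META_REFRESH = r'<meta\s+http-equiv=["\']?refresh["\']?[^>]*content=["\']?[^"\'>]*url=([^"\'>]+)'

def meta_refresh_target(page):
    # return the refresh target URL, or None when the page has no meta refresh
    match = re.search(META_REFRESH, page, re.I | re.S)
    return match.group(1).strip() if match else None

# meta_refresh_target('<meta http-equiv="refresh" content="0; url=/login">') -> '/login'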
Example #21
    def http_error_302(self, req, fp, code, msg, headers):
        start = time.time()
        content = None
        redurl = self._get_header_redirect(headers) if not conf.ignoreRedirects else None

        try:
            content = fp.read(MAX_CONNECTION_TOTAL_SIZE)
        except Exception as ex:
            dbgMsg = "there was a problem while retrieving "
            dbgMsg += "redirect response content ('%s')" % getSafeExString(ex)
            logger.debug(dbgMsg)
        finally:
            if content:
                try:  # try to write it back to the read buffer so we could reuse it in further steps
                    fp.fp._rbuf.truncate(0)
                    fp.fp._rbuf.write(content)
                except:
                    pass

        content = decodePage(content, headers.get(HTTP_HEADER.CONTENT_ENCODING), headers.get(HTTP_HEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        redirectMsg = "HTTP redirect "
        redirectMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, getUnicode(msg))

        if headers:
            logHeaders = "\r\n".join("%s: %s" % (getUnicode(key.capitalize() if hasattr(key, "capitalize") else key), getUnicode(value)) for (key, value) in headers.items())
        else:
            logHeaders = ""

        redirectMsg += logHeaders
        if content:
            redirectMsg += "\r\n\r\n%s" % getUnicode(content[:MAX_CONNECTION_CHUNK_SIZE])

        logHTTPTraffic(threadData.lastRequestMsg, redirectMsg, start, time.time())
        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg)

        if redurl:
            try:
                if not _urllib.parse.urlsplit(redurl).netloc:
                    redurl = _urllib.parse.urljoin(req.get_full_url(), redurl)

                self._infinite_loop_check(req)
                self._ask_redirect_choice(code, redurl, req.get_method())
            except ValueError:
                redurl = None
                result = fp

        if redurl and kb.redirectChoice == REDIRECTION.YES:
            parseResponse(content, headers)

            req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl)
            if headers and HTTP_HEADER.SET_COOKIE in headers:
                cookies = dict()
                delimiter = conf.cookieDel or DEFAULT_COOKIE_DELIMITER
                last = None

                for part in req.headers.get(HTTP_HEADER.COOKIE, "").split(delimiter) + headers.getheaders(HTTP_HEADER.SET_COOKIE):
                    if '=' in part:
                        part = part.strip()
                        key, value = part.split('=', 1)
                        cookies[key] = value
                        last = key
                    elif last:
                        cookies[last] += "%s%s" % (delimiter, part)

                req.headers[HTTP_HEADER.COOKIE] = delimiter.join("%s=%s" % (key, cookies[key]) for key in cookies)

            try:
                result = _urllib.request.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
            except _urllib.error.HTTPError as ex:
                result = ex

                # Dirty hack for http://bugs.python.org/issue15701
                try:
                    result.info()
                except AttributeError:
                    def _(self):
                        return getattr(self, "hdrs") or {}
                    result.info = types.MethodType(_, result)

                if not hasattr(result, "read"):
                    def _(self, length=None):
                        return ex.msg
                    result.read = types.MethodType(_, result)

                if not getattr(result, "url", None):
                    result.url = redurl

                if not getattr(result, "code", None):
                    result.code = 999
            except:
                redurl = None
                result = fp
                fp.read = io.BytesIO("").read
        else:
            result = fp

        threadData.lastRedirectURL = (threadData.lastRequestUID, redurl)

        result.redcode = code
        result.redurl = redurl
        return result
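The pattern shared by the redirect handlers in these examples: let urllib follow (or refuse) the redirect, then annotate whatever response object comes back with redcode/redurl so the caller can tell that a redirect happened. Stripped of the logging, cookie juggling and loop checks, a minimal Python 3 sketch:

import urllib.request

class RedirectTagger(urllib.request.HTTPRedirectHandler):
    # follow the redirect normally, but remember where it pointed
    def http_error_302(self, req, fp, code, msg, headers):
        redurl = headers.get("Location")
        result = urllib.request.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
        if result is not None:
            result.redcode = code
            result.redurl = redurl
        return result

    http_error_301 = http_error_303 = http_error_307 = http_error_302

# opener = urllib.request.build_opener(RedirectTagger)
# resp = opener.open("http://example.com/old")  # hypothetical URL
# getattr(resp, "redcode", None) tells the caller a redirect occurred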
Example #22
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)

        url       = kwargs.get('url',       conf.url).replace(" ", "%20")
        get       = kwargs.get('get',       None)
        post      = kwargs.get('post',      None)
        cookie    = kwargs.get('cookie',    None)
        ua        = kwargs.get('ua',        None)
        direct    = kwargs.get('direct',    False)
        multipart = kwargs.get('multipart', False)
        silent    = kwargs.get('silent',    False)
        raise404  = kwargs.get('raise404',  True)

        page            = ""
        cookieStr       = ""
        requestMsg      = "HTTP request:\n%s " % conf.method
        requestMsg     += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg     = "HTTP response "
        requestHeaders  = ""
        responseHeaders = ""

        try:
            if silent:
                socket.setdefaulttimeout(3)
    
            if direct:
                if "?" in url:
                    url, params = url.split("?")
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params
    
            elif multipart:
                # Needed in this form because of potential circular dependency problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler
                
                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()            
                responseHeaders = conn.info()
    
                encoding = responseHeaders.get("Content-Encoding")
                page = decodePage(page, encoding)
    
                return page
    
            else:
                if conf.parameters.has_key("GET") and not get:
                    get = conf.parameters["GET"]
    
                if get:
                    get = urlencode(get)
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get
    
                if conf.method == "POST":
                    if conf.parameters.has_key("POST") and not post:
                        post = conf.parameters["POST"]
    
            requestMsg += " HTTP/1.1"

            # Perform HTTP request
            headers        = forgeHeaders(cookie, ua)
            req            = urllib2.Request(url, post, headers)
            conn           = urllib2.urlopen(req)

            # Reset the number of connection retries
            conf.retriesCount = 0

            if not req.has_header("Accept-Encoding"):
                requestHeaders += "\nAccept-Encoding: identity"

            requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

            if not conf.dropSetCookie:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "
    
                    cookie = str(cookie)
                    index  = cookie.index(" for ")
    
                    cookieStr += "%s; " % cookie[8:index]

            if not req.has_header("Cookie") and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]
     
            if not req.has_header("Connection"):
                requestHeaders += "\nConnection: close"

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n%s" % post

            requestMsg += "\n"

            logger.log(9, requestMsg)

            # Get HTTP response
            page            = conn.read()
            code            = conn.code
            status          = conn.msg
            responseHeaders = conn.info()
            
            encoding = responseHeaders.get("Content-Encoding")
            page = decodePage(page, encoding)

        except urllib2.HTTPError, e:
            if e.code == 401:
                exceptionMsg  = "not authorized, try to provide right HTTP "
                exceptionMsg += "authentication type and valid credentials"
                raise sqlmapConnectionException, exceptionMsg
            elif e.code == 404 and raise404:
                exceptionMsg = "page not found"
                raise sqlmapConnectionException, exceptionMsg
            else:
                page = e.read()
                code = e.code
                status = e.msg
                responseHeaders = e.info()

                debugMsg = "got HTTP error code: %d" % code
                logger.debug(debugMsg)
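Parsing str(cookie) and slicing on " for " in the snippet above is a fragile way to render cookielib entries; iterating the jar's name/value pairs gives the same Cookie header directly. A sketch assuming a populated http.cookiejar.CookieJar (cookie_header is a hypothetical helper):

import http.cookiejar

def cookie_header(jar):
    # render a CookieJar as a single Cookie: header value
    return "; ".join("%s=%s" % (c.name, c.value) for c in jar)

# jar = http.cookiejar.CookieJar()
# ... jar populated by an opener ...
# headers["Cookie"] = cookie_header(jar)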
Example #23
    def getPage(**kwargs):
        """
        This method connects to the target url or proxy and returns
        the target url page content
        """

        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)
        elif conf.cpuThrottle:
            cpuThrottle(conf.cpuThrottle)

        threadData = getCurrentThreadData()
        threadData.lastRequestUID += 1

        # dirty hack because urllib2 just skips the other part of provided url
        # split with space char while urlencoding it in the later phase
        url             = kwargs.get('url',           conf.url).replace(" ", "%20")

        get             = kwargs.get('get',           None)
        post            = kwargs.get('post',          None)
        method          = kwargs.get('method',        None)
        cookie          = kwargs.get('cookie',        None)
        ua              = kwargs.get('ua',            None)
        referer         = kwargs.get('referer',       None)
        direct          = kwargs.get('direct',        False)
        multipart       = kwargs.get('multipart',     False)
        silent          = kwargs.get('silent',        False)
        raise404        = kwargs.get('raise404',      True)
        auxHeaders      = kwargs.get('auxHeaders',    None)
        response        = kwargs.get('response',      False)
        ignoreTimeout   = kwargs.get('ignoreTimeout', False)
        refreshing      = kwargs.get('refreshing',    False)

        page            = ""
        cookieStr       = ""
        requestMsg      = "HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, conf.method)
        requestMsg     += "%s" % urlparse.urlsplit(url)[2] or "/"
        responseMsg     = "HTTP response "
        requestHeaders  = ""
        responseHeaders = None
        logHeaders      = ""

        try:
            if silent:
                socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)

            if direct:
                if "?" in url:
                    url, params = url.split("?", 1)
                    params = urlencode(params)
                    url = "%s?%s" % (url, params)
                    requestMsg += "?%s" % params

            elif multipart:
                # Needed in this form because of a potential circular dependency
                # problem (option -> update -> connect -> option)
                from lib.core.option import proxyHandler

                multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
                conn = multipartOpener.open(url, multipart)
                page = conn.read()
                responseHeaders = conn.info()
                responseHeaders[URI_HTTP_HEADER] = conn.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

                return page

            elif refreshing:
                # Reference(s): 
                # http://vancouver-webpages.com/META/metatags.detail.html
                # http://webdesign.about.com/od/metataglibraries/a/aa080300a.htm
                get = None
                post = None

            else:
                if PLACE.GET in conf.parameters and not get:
                    get = conf.parameters[PLACE.GET]

                if get:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

                if conf.method == HTTPMETHOD.POST:
                    if PLACE.POST in conf.parameters and not post:
                        post = conf.parameters[PLACE.POST]

            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

            # Perform HTTP request
            headers = forgeHeaders(cookie, ua, referer)

            if conf.realTest:
                headers[HTTPHEADER.REFERER] = "%s://%s" % (conf.scheme, conf.hostname)

            if kb.authHeader:
                headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

            if kb.proxyAuthHeader:
                headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

            if auxHeaders:
                for key, item in auxHeaders.items():
                    headers[key] = item

            # items() returns a list copy in Python 2, so it is safe to
            # mutate the dictionary while re-keying its entries here
            for key, item in headers.items():
                del headers[key]
                headers[encodeUnicode(key, kb.pageEncoding)] = encodeUnicode(item, kb.pageEncoding)

            post = encodeUnicode(post, kb.pageEncoding)

            if method:
                req = MethodRequest(url, post, headers)
                req.set_method(method)
            else:
                req = urllib2.Request(url, post, headers)

            if not conf.dropSetCookie and conf.cj:
                for cookie in conf.cj:
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    # keep only the "name=value" part of the cookie's
                    # "<Cookie name=value for domain/path>" representation
                    cookie = getUnicode(cookie)
                    index = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]

            if not req.has_header(HTTPHEADER.ACCEPT_ENCODING):
                requestHeaders += "%s: identity\n" % HTTPHEADER.ACCEPT_ENCODING

            requestHeaders += "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

            if not req.has_header(HTTPHEADER.COOKIE) and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]

            if not req.has_header(HTTPHEADER.CONNECTION):
                requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

            requestMsg += "\n%s" % requestHeaders

            if post:
                requestMsg += "\n\n%s" % post

            requestMsg += "\n"

            logger.log(8, requestMsg)

            conn = urllib2.urlopen(req)

            if not kb.authHeader and req.has_header(HTTPHEADER.AUTHORIZATION):
                kb.authHeader = req.get_header(HTTPHEADER.AUTHORIZATION)

            if not kb.proxyAuthHeader and req.has_header(HTTPHEADER.PROXY_AUTHORIZATION):
                kb.proxyAuthHeader = req.get_header(HTTPHEADER.PROXY_AUTHORIZATION)

            if hasattr(conn, "setcookie"):
                kb.redirectSetCookie = conn.setcookie

            if hasattr(conn, "redurl") and hasattr(conn, "redcode") and not conf.redirectHandled and not conf.realTest:
                msg  = "sqlmap got a %d redirect to " % conn.redcode
                msg += "%s - What target address do you " % conn.redurl
                msg += "want to use from now on? %s " % conf.url
                msg += "(default) or provide another target address based "
                msg += "also on the redirection got from the application\n"

                while True:
                    choice = readInput(msg, default=None)

                    if choice:
                        conf.url = choice
                        try:
                            parseTargetUrl()
                            return Connect.__getPageProxy(**kwargs)
                        except sqlmapSyntaxException:
                            continue

                    break

                conf.redirectHandled = True

            # Reset the number of connection retries
            kb.retriesCount = 0

            # Return response object
            if response:
                return conn, None

            # Get HTTP response
            page = conn.read()
            code = conn.code
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            status = getUnicode(conn.msg)

            refreshUrl = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

            if refreshUrl and not refreshing:
                url = refreshUrl

                if url.lower().startswith(("http://", "https://")):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/')+1] + url

                threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                kwargs['refreshing'] = True

                debugMsg = "got HTML meta refresh header"
                logger.debug(debugMsg)

                try:
                    return Connect.__getPageProxy(**kwargs)
                except sqlmapSyntaxException:
                    pass

            # Explicit closing of connection object
            if not conf.keepAlive:
                try:
                    conn.fp._sock.close()
                    conn.close()
                except Exception, msg:
                    warnMsg = "problem occured during connection closing ('%s')" % msg
                    logger.warn(warnMsg)

        except urllib2.HTTPError, e:
            page = None
            responseHeaders = None

            try:
                page = e.read()
                responseHeaders = e.info()
                responseHeaders[URI_HTTP_HEADER] = e.geturl()
                page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
            except socket.timeout:
                warnMsg  = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % e.code
                logger.warn(warnMsg)
                return None, None
            except:
                pass

            code = e.code
            threadData.lastHTTPError = (threadData.lastRequestUID, code)

            if code not in kb.httpErrorCodes:
                kb.httpErrorCodes[code] = 0
            kb.httpErrorCodes[code] += 1

            status = getUnicode(e.msg)
            responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

            if responseHeaders:
                logHeaders = "\n".join(["%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items()])

            logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, page if isinstance(page, unicode) else getUnicode(page)))

            if conf.verbose <= 5:
                responseMsg += getUnicode(logHeaders)
            elif conf.verbose > 5:
                responseMsg += "%s\n\n%s\n" % (logHeaders, page)

            logger.log(7, responseMsg)

            if e.code == 401:
                errMsg  = "not authorized, try to provide right HTTP "
                errMsg += "authentication type and valid credentials (%d)" % code
                raise sqlmapConnectionException, errMsg
            elif e.code == 404 and raise404:
                errMsg = "page not found (%d)" % code
                raise sqlmapConnectionException, errMsg
            else:
                debugMsg = "got HTTP error code: %d (%s)" % (code, status)
                logger.debug(debugMsg)
                page = processResponse(page, responseHeaders)
                return page, responseHeaders
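
The meta-refresh follow-up near the end of getPage() can be isolated into a small helper. A sketch under the assumption of a simplified refresh regex (sqlmap's actual META_REFRESH_REGEX is broader and also tolerates other attribute orders):

import re

META_REFRESH = re.compile(r"""<meta\s+http-equiv=["']?refresh["']?\s+"""
                          r"""content=["']?\d+;\s*url=([^"'>]+)""", re.I | re.S)

def meta_refresh_target(page, base_url):
    match = META_REFRESH.search(page)
    if not match:
        return None
    url = match.group(1).strip()
    if url.lower().startswith(("http://", "https://")):
        return url
    # Relative target: resolve against the directory part of the base URL,
    # mirroring the conf.url[:conf.url.rfind('/') + 1] + url logic above
    return base_url[:base_url.rfind('/') + 1] + url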
Example #24
def _search(dork):
    """
    This method performs the effective search on Google providing
    the google dork and the Google session cookie
    """

    if not dork:
        return None

    data = None
    headers = {}

    headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(
        HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
    headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE

    try:
        req = _urllib.request.Request("https://www.google.com/ncr",
                                      headers=headers)
        conn = _urllib.request.urlopen(req)
    except Exception as ex:
        errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
        raise SqlmapConnectionException(errMsg)

    gpage = conf.googlePage if conf.googlePage > 1 else 1
    logger.info("using search result page #%d" % gpage)

    url = "https://www.google.com/search?"
    url += "q=%s&" % urlencode(dork, convall=True)
    url += "num=100&hl=en&complete=0&safe=off&filter=0&btnG=Search"
    url += "&start=%d" % ((gpage - 1) * 100)

    try:
        req = _urllib.request.Request(url, headers=headers)
        conn = _urllib.request.urlopen(req)

        requestMsg = "HTTP request:\nGET %s" % url
        requestMsg += " %s" % _http_client.HTTPException._http_vsn_str
        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        page = conn.read()
        code = conn.code
        status = conn.msg
        responseHeaders = conn.info()
        page = decodePage(page, responseHeaders.get("Content-Encoding"),
                          responseHeaders.get("Content-Type"))

        responseMsg = "HTTP response (%s - %d):\n" % (status, code)

        if conf.verbose <= 4:
            responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
        elif conf.verbose > 4:
            responseMsg += "%s\n%s\n" % (responseHeaders, page)

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
    except _urllib.error.HTTPError as ex:
        try:
            page = ex.read()
        except Exception as _:
            warnMsg = "problem occurred while trying to get "
            warnMsg += "an error page information (%s)" % getSafeExString(_)
            logger.critical(warnMsg)
            return None
    except (_urllib.error.URLError, _http_client.error, socket.error,
            socket.timeout, socks.ProxyError):
        errMsg = "unable to connect to Google"
        raise SqlmapConnectionException(errMsg)

    retVal = [
        _urllib.parse.unquote(match.group(1) or match.group(2))
        for match in re.finditer(GOOGLE_REGEX, page, re.I)
    ]

    if not retVal and "detected unusual traffic" in page:
        warnMsg = "Google has detected 'unusual' traffic from "
        warnMsg += "used IP address disabling further searches"

        if conf.proxyList:
            raise SqlmapBaseException(warnMsg)
        else:
            logger.critical(warnMsg)

    if not retVal:
        message = "no usable links found. What do you want to do?"
        message += "\n[1] (re)try with DuckDuckGo (default)"
        message += "\n[2] (re)try with Bing"
        message += "\n[3] quit"
        choice = readInput(message, default='1')

        if choice == '3':
            raise SqlmapUserQuitException
        elif choice == '2':
            url = "https://www.bing.com/search?q=%s&first=%d" % (urlencode(
                dork, convall=True), (gpage - 1) * 10 + 1)
            regex = BING_REGEX
        else:
            url = "https://duckduckgo.com/html/"
            data = "q=%s&s=%d" % (urlencode(dork, convall=True),
                                  (gpage - 1) * 30)
            regex = DUCKDUCKGO_REGEX

        try:
            req = _urllib.request.Request(url, data=data, headers=headers)
            conn = _urllib.request.urlopen(req)

            requestMsg = "HTTP request:\nGET %s" % url
            requestMsg += " %s" % _http_client.HTTPException._http_vsn_str
            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            page = conn.read()
            code = conn.code
            status = conn.msg
            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get("Content-Encoding"),
                              responseHeaders.get("Content-Type"))

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)

            if conf.verbose <= 4:
                responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
        except _urllib.error.HTTPError as ex:
            try:
                page = ex.read()
                page = decodePage(page, ex.headers.get("Content-Encoding"),
                                  ex.headers.get("Content-Type"))
            except socket.timeout:
                warnMsg = "connection timed out while trying "
                warnMsg += "to get error page information (%d)" % ex.code
                logger.critical(warnMsg)
                return None
        except:
            errMsg = "unable to connect"
            raise SqlmapConnectionException(errMsg)

        retVal = [
            _urllib.parse.unquote(match.group(1).replace("&amp;", "&"))
            for match in re.finditer(regex, page, re.I | re.S)
        ]

        if not retVal and "issue with the Tor Exit Node you are currently using" in page:
            warnMsg = "DuckDuckGo has detected 'unusual' traffic from "
            warnMsg += "used (Tor) IP address"

            if conf.proxyList:
                raise SqlmapBaseException(warnMsg)
            else:
                logger.critical(warnMsg)

    return retVal
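
All three engines above share one shape: fetch a results page, then extract links with an engine-specific regex. A minimal sketch of that shape (Python 3; RESULT_REGEX and the DuckDuckGo HTML endpoint are illustrative assumptions, not sqlmap's GOOGLE_REGEX/BING_REGEX/DUCKDUCKGO_REGEX):

import re
import urllib.parse
import urllib.request

# Crude link extractor, standing in for an engine-specific pattern
RESULT_REGEX = r'href="(https?://[^"]+)"'

def search_links(query, page=1, per_page=30):
    url = "https://duckduckgo.com/html/?q=%s&s=%d" % (
        urllib.parse.quote_plus(query), (page - 1) * per_page)
    req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0"})
    body = urllib.request.urlopen(req).read().decode("utf-8", "replace")
    return [urllib.parse.unquote(match.group(1))
            for match in re.finditer(RESULT_REGEX, body, re.I)]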
Example #25
    def http_error_302(self, req, fp, code, msg, headers):
        start = time.time()
        content = None
        redurl = self._get_header_redirect(headers) if not conf.ignoreRedirects else None

        try:
            content = fp.read(MAX_CONNECTION_TOTAL_SIZE)
        except:  # e.g. IncompleteRead
            content = ""
        finally:
            if content:
                try:  # try to write it back to the read buffer so we could reuse it in further steps
                    fp.fp._rbuf.truncate(0)
                    fp.fp._rbuf.write(content)
                except:
                    pass

        content = decodePage(content,
                             headers.get(HTTP_HEADER.CONTENT_ENCODING),
                             headers.get(HTTP_HEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        redirectMsg = "HTTP redirect "
        redirectMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code,
                                               getUnicode(msg))

        if headers:
            logHeaders = "\r\n".join("%s: %s" % (getUnicode(key.capitalize(
            ) if hasattr(key, "capitalize") else key), getUnicode(value))
                                     for (key, value) in headers.items())
        else:
            logHeaders = ""

        redirectMsg += logHeaders
        if content:
            redirectMsg += "\r\n\r\n%s" % getUnicode(
                content[:MAX_CONNECTION_READ_SIZE])

        logHTTPTraffic(threadData.lastRequestMsg, redirectMsg, start,
                       time.time())
        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg)

        if redurl:
            try:
                if not _urllib.parse.urlsplit(redurl).netloc:
                    redurl = _urllib.parse.urljoin(req.get_full_url(), redurl)

                self._infinite_loop_check(req)
                self._ask_redirect_choice(code, redurl, req.get_method())
            except ValueError:
                redurl = None
                result = fp

        if redurl and kb.redirectChoice == REDIRECTION.YES:
            parseResponse(content, headers)

            req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl)
            if headers and HTTP_HEADER.SET_COOKIE in headers:
                cookies = dict()
                delimiter = conf.cookieDel or DEFAULT_COOKIE_DELIMITER
                last = None

                for part in req.headers.get(HTTP_HEADER.COOKIE, "").split(delimiter) + ([headers[HTTP_HEADER.SET_COOKIE]] if HTTP_HEADER.SET_COOKIE in headers else []):
                    if '=' in part:
                        part = part.strip()
                        key, value = part.split('=', 1)
                        cookies[key] = value
                        last = key
                    elif last:
                        cookies[last] += "%s%s" % (delimiter, part)

                req.headers[HTTP_HEADER.COOKIE] = delimiter.join("%s=%s" % (key, cookies[key]) for key in cookies)

            try:
                result = _urllib.request.HTTPRedirectHandler.http_error_302(
                    self, req, fp, code, msg, headers)
            except _urllib.error.HTTPError as ex:
                result = ex

                # Dirty hack for https://github.com/sqlmapproject/sqlmap/issues/4046
                try:
                    hasattr(result, "read")
                except KeyError:

                    class _(object):
                        pass

                    result = _()

                # Dirty hack for http://bugs.python.org/issue15701
                try:
                    result.info()
                except AttributeError:

                    def _(self):
                        return getattr(self, "hdrs", {})

                    result.info = types.MethodType(_, result)

                if not hasattr(result, "read"):

                    def _(self, length=None):
                        try:
                            retVal = getSafeExString(
                                ex
                            )  # Note: pyflakes mistakenly marks 'ex' as undefined (NOTE: tested in both Python2 and Python3)
                        except:
                            retVal = ""
                        return retVal

                    result.read = types.MethodType(_, result)

                if not getattr(result, "url", None):
                    result.url = redurl

                if not getattr(result, "code", None):
                    result.code = 999
            except:
                redurl = None
                result = fp
                fp.read = io.BytesIO(b"").read
        else:
            result = fp

        threadData.lastRedirectURL = (threadData.lastRequestUID, redurl)

        result.redcode = code
        result.redurl = getUnicode(redurl)
        return result
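
Stripped of the traffic logging, cookie merging and loop checks, the handler above boils down to delegating to the stock redirect handler and annotating whatever comes back. A minimal sketch of that skeleton (Python 3; the class name and usage are illustrative):

import urllib.request

class RedirectRecorder(urllib.request.HTTPRedirectHandler):
    def http_error_302(self, req, fp, code, msg, headers):
        redurl = headers.get("Location")
        # Let the stock handler follow the redirect, then record where
        # it came from on the returned response object
        result = urllib.request.HTTPRedirectHandler.http_error_302(
            self, req, fp, code, msg, headers)
        if result is not None:
            result.redcode = code
            result.redurl = redurl
        return result

    http_error_301 = http_error_303 = http_error_307 = http_error_302

# opener = urllib.request.build_opener(RedirectRecorder())
# response = opener.open("http://example.com/redirecting-page")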
Example #26
    def http_error_302(self, req, fp, code, msg, headers):
        start = time.time()
        content = None
        redurl = self._get_header_redirect(headers) if not conf.ignoreRedirects else None

        try:
            content = fp.read(MAX_CONNECTION_TOTAL_SIZE)
        except Exception as ex:
            dbgMsg = "there was a problem while retrieving "
            dbgMsg += "redirect response content ('%s')" % getSafeExString(ex)
            logger.debug(dbgMsg)
        finally:
            if content:
                try:  # try to write it back to the read buffer so we could reuse it in further steps
                    fp.fp._rbuf.truncate(0)
                    fp.fp._rbuf.write(content)
                except:
                    pass

        content = decodePage(content,
                             headers.get(HTTP_HEADER.CONTENT_ENCODING),
                             headers.get(HTTP_HEADER.CONTENT_TYPE))

        threadData = getCurrentThreadData()
        threadData.lastRedirectMsg = (threadData.lastRequestUID, content)

        redirectMsg = "HTTP redirect "
        redirectMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code,
                                               getUnicode(msg))

        if headers:
            logHeaders = "\r\n".join("%s: %s" % (getUnicode(key.capitalize(
            ) if isinstance(key, basestring) else key), getUnicode(value))
                                     for (key, value) in headers.items())
        else:
            logHeaders = ""

        redirectMsg += logHeaders
        if content:
            redirectMsg += "\r\n\r\n%s" % getUnicode(
                content[:MAX_CONNECTION_CHUNK_SIZE])

        logHTTPTraffic(threadData.lastRequestMsg, redirectMsg, start,
                       time.time())
        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, redirectMsg)

        if redurl:
            try:
                if not urlparse.urlsplit(redurl).netloc:
                    redurl = urlparse.urljoin(req.get_full_url(), redurl)

                self._infinite_loop_check(req)
                self._ask_redirect_choice(code, redurl, req.get_method())
            except ValueError:
                redurl = None
                result = fp

        if redurl and kb.redirectChoice == REDIRECTION.YES:
            parseResponse(content, headers)

            req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl)
            if headers and HTTP_HEADER.SET_COOKIE in headers:
                cookies = dict()
                delimiter = conf.cookieDel or DEFAULT_COOKIE_DELIMITER
                last = None

                for part in req.headers.get(HTTP_HEADER.COOKIE, "").split(delimiter) + headers.getheaders(HTTP_HEADER.SET_COOKIE):
                    if '=' in part:
                        part = part.strip()
                        key, value = part.split('=', 1)
                        cookies[key] = value
                        last = key
                    elif last:
                        cookies[last] += "%s%s" % (delimiter, part)

                req.headers[HTTP_HEADER.COOKIE] = delimiter.join("%s=%s" % (key, cookies[key]) for key in cookies)

            try:
                result = urllib2.HTTPRedirectHandler.http_error_302(
                    self, req, fp, code, msg, headers)
            except urllib2.HTTPError as ex:
                result = ex

                # Dirty hack for http://bugs.python.org/issue15701
                try:
                    result.info()
                except AttributeError:

                    def _(self):
                        return getattr(self, "hdrs", None) or {}

                    result.info = types.MethodType(_, result)

                if not hasattr(result, "read"):

                    def _(self, length=None):
                        return ex.msg

                    result.read = types.MethodType(_, result)

                if not getattr(result, "url", None):
                    result.url = redurl

                if not getattr(result, "code", None):
                    result.code = 999
            except:
                redurl = None
                result = fp
                fp.read = StringIO("").read
        else:
            result = fp

        threadData.lastRedirectURL = (threadData.lastRequestUID, redurl)

        result.redcode = code
        result.redurl = redurl
        return result
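
The Set-Cookie merging above can be read in isolation. A sketch as a standalone helper (Python 3), assuming the same last-value-wins semantics; unlike the original, cookie attributes after the first ';' are simply dropped here rather than treated as continuation parts:

def merge_cookies(cookie_header, set_cookie_values, delimiter="; "):
    # Fold Set-Cookie values from a redirect response into the outgoing
    # Cookie header; a later value for the same name overrides the earlier one
    cookies = {}
    parts = cookie_header.split(delimiter) if cookie_header else []
    for part in parts + [value.split(";")[0] for value in set_cookie_values]:
        part = part.strip()
        if "=" in part:
            key, value = part.split("=", 1)
            cookies[key] = value
    return delimiter.join("%s=%s" % (key, value) for key, value in cookies.items())

# merge_cookies("a=1; b=2", ["b=3; path=/"]) -> "a=1; b=3"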
Example #27
def _search(dork):
    """
    This method performs the effective search (modified here to query
    Baidu, see the 'polluted' note below) providing the search dork
    """
    retVal = []
    paths = []

    if not dork:
        return None

    headers = {}

    headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
    headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE

    gpage = conf.googlePage if conf.googlePage > 1 else 1  # overridden by the loop below

    # polluted by xi4okv QQ: 48011203

    for gpage in xrange(1, 10):
        logger.info("using search result page #%d" % gpage)

        url = "https://m.baidu.com/s?"
        url += "word=%s" % urlencode(dork, convall=True)
        url += "&pn=%d" % ((gpage - 1) * 10)

        try:
            req = urllib2.Request(url, headers=headers)
            conn = urllib2.urlopen(req)

            requestMsg = "HTTP request:\nGET %s" % url
            requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
            logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

            page = conn.read()
            code = conn.code
            status = conn.msg

            responseHeaders = conn.info()
            page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))

            responseMsg = "HTTP response (%s - %d):\n" % (status, code)

            if conf.verbose <= 4:
                responseMsg += getUnicode(responseHeaders, UNICODE_ENCODING)
            elif conf.verbose > 4:
                responseMsg += "%s\n%s\n" % (responseHeaders, page)

            logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
        except urllib2.HTTPError:
            # skip this results page on error; 'page' would otherwise be
            # stale (or undefined on the first iteration) below
            continue

        urls = [urllib.unquote(match.group(0) or match.group(1)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]

        import urlparse

        urls_pat = re.compile(r"http://(.*)[^']")

        for url in urls:
            aurl = re.findall(urls_pat, url)
            if aurl and "?" in url and "baidu" not in url:
                xpath = urlparse.urlparse(url).path
                if xpath not in paths:
                    paths.append(xpath)
                    retVal.append(aurl[0])
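
The paths/retVal bookkeeping above keeps only the first dynamic URL seen for each distinct path, so each script gets tested once. The same idea as a standalone helper (Python 3; the exclude parameter generalizing the hardcoded "baidu" filter is an assumption):

from urllib.parse import urlparse

def dedupe_by_path(urls, exclude="baidu"):
    # Keep only URLs with a query string, skip the search engine's own
    # links, and accept at most one URL per distinct path
    seen = set()
    result = []
    for url in urls:
        if "?" not in url or exclude in url:
            continue
        path = urlparse(url).path
        if path not in seen:
            seen.add(path)
            result.append(url)
    return result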