def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False
    warnFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if conf.api:
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    dumpDbPath = os.path.join(conf.dumpPath, unsafeSQLIdentificatorNaming(db))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            try:
                os.makedirs(dumpDbPath)
            except:
                warnFile = True

                _ = unicodeencode(re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(db)))
                dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))

                if not os.path.isdir(dumpDbPath):
                    try:
                        os.makedirs(dumpDbPath)
                    except Exception, ex:
                        try:
                            tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
                        except IOError, _:
                            errMsg = "unable to write to the temporary directory ('%s'). " % _
                            errMsg += "Please make sure that your disk is not full and "
                            errMsg += "that you have sufficient write permissions to "
                            errMsg += "create temporary files and/or directories"
                            raise SqlmapSystemException(errMsg)

                        warnMsg = "unable to create dump directory "
                        warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex))
                        warnMsg += "Using temporary directory '%s' instead" % tempDir
                        logger.warn(warnMsg)

                        dumpDbPath = tempDir
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False
    warnFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if conf.api:
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    dumpDbPath = os.path.join(conf.dumpPath, unsafeSQLIdentificatorNaming(db))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            try:
                os.makedirs(dumpDbPath, 0755)
            except:
                warnFile = True

                _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))
                dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))

                if not os.path.isdir(dumpDbPath):
                    try:
                        os.makedirs(dumpDbPath, 0755)
                    except Exception, ex:
                        try:
                            tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
                        except IOError, _:
                            errMsg = "unable to write to the temporary directory ('%s'). " % _
                            errMsg += "Please make sure that your disk is not full and "
                            errMsg += "that you have sufficient write permissions to "
                            errMsg += "create temporary files and/or directories"
                            raise SqlmapSystemException(errMsg)

                        warnMsg = "unable to create dump directory "
                        warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex))
                        warnMsg += "Using temporary directory '%s' instead" % tempDir
                        logger.warn(warnMsg)

                        dumpDbPath = tempDir
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False
    warnFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if hasattr(conf, "api"):
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(db)))
    if len(_) < len(db) or IS_WIN and db.upper() in WINDOWS_RESERVED_NAMES:
        _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))
        dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))
        warnFile = True
    else:
        dumpDbPath = os.path.join(conf.dumpPath, _)

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            try:
                os.makedirs(dumpDbPath, 0755)
            except (OSError, IOError), ex:
                try:
                    tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
                except IOError, _:
                    errMsg = "unable to write to the temporary directory ('%s'). " % _
                    errMsg += "Please make sure that your disk is not full and "
                    errMsg += "that you have sufficient write permissions to "
                    errMsg += "create temporary files and/or directories"
                    raise SqlmapSystemException(errMsg)

                warnMsg = "unable to create dump directory "
                warnMsg += "'%s' (%s). " % (dumpDbPath, ex)
                warnMsg += "Using temporary directory '%s' instead" % tempDir
                logger.warn(warnMsg)

                dumpDbPath = tempDir
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False
    warnFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if hasattr(conf, "api"):
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    dumpDbPath = os.path.join(conf.dumpPath, unsafeSQLIdentificatorNaming(db))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            try:
                os.makedirs(dumpDbPath, 0755)
            except:
                warnFile = True

                _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))
                dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))

                if not os.path.isdir(dumpDbPath):
                    try:
                        os.makedirs(dumpDbPath, 0755)
                    except Exception, ex:
                        try:
                            tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
                        except IOError, _:
                            errMsg = "unable to write to the temporary directory ('%s'). " % _
                            errMsg += "Please make sure that your disk is not full and "
                            errMsg += "that you have sufficient write permissions to "
                            errMsg += "create temporary files and/or directories"
                            raise SqlmapSystemException(errMsg)

                        warnMsg = "unable to create dump directory "
                        warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex))
                        warnMsg += "Using temporary directory '%s' instead" % tempDir
                        logger.warn(warnMsg)

                        dumpDbPath = tempDir
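# ---------------------------------------------------------------------------
# Illustrative sketch (not sqlmap API): the variants above all fall back to a
# filesystem-safe directory name when makedirs() fails on the raw database
# name -- non-word characters are replaced and a short md5 suffix keeps
# distinct names from colliding. The helper name and the "_" replacement
# character are assumptions for demonstration only.
import hashlib
import re

def safeDumpDirName(name):
    sanitized = re.sub(r"[^\w]", "_", name)                    # strip path-hostile characters
    suffix = hashlib.md5(name.encode("utf8")).hexdigest()[:8]  # disambiguate collisions
    return "%s-%s" % (sanitized, suffix)

print(safeDumpDirName("master..sysdatabases"))  # e.g. master__sysdatabases-<8 hex chars>
# ---------------------------------------------------------------------------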
def getPage(**kwargs):
    """
    This method connects to the target URL or proxy and returns
    the target URL page content
    """

    if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
        time.sleep(conf.delay)
    elif conf.cpuThrottle:
        cpuThrottle(conf.cpuThrottle)

    if conf.dummy:
        return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())

    threadData = getCurrentThreadData()

    with kb.locks.request:
        kb.requestCounter += 1
        threadData.lastRequestUID = kb.requestCounter

    url = kwargs.get("url", None) or conf.url
    get = kwargs.get("get", None)
    post = kwargs.get("post", None)
    method = kwargs.get("method", None)
    cookie = kwargs.get("cookie", None)
    ua = kwargs.get("ua", None) or conf.agent
    referer = kwargs.get("referer", None) or conf.referer
    host = kwargs.get("host", None) or conf.host
    direct_ = kwargs.get("direct", False)
    multipart = kwargs.get("multipart", False)
    silent = kwargs.get("silent", False)
    raise404 = kwargs.get("raise404", True)
    timeout = kwargs.get("timeout", None) or conf.timeout
    auxHeaders = kwargs.get("auxHeaders", None)
    response = kwargs.get("response", False)
    ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
    refreshing = kwargs.get("refreshing", False)
    retrying = kwargs.get("retrying", False)
    crawling = kwargs.get("crawling", False)
    skipRead = kwargs.get("skipRead", False)

    if not urlparse.urlsplit(url).netloc:
        url = urlparse.urljoin(conf.url, url)

    # flag to know if we are dealing with the same target host
    target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

    if not retrying:
        # Reset the number of connection retries
        threadData.retriesCount = 0

    # fix for known issue when urllib2 just skips the other part of provided
    # url split with space char while urlencoding it in the later phase
    url = url.replace(" ", "%20")

    conn = None
    code = None
    page = None

    _ = urlparse.urlsplit(url)
    requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
    requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
    responseMsg = u"HTTP response "
    requestHeaders = u""
    responseHeaders = None
    logHeaders = u""
    skipLogTraffic = False

    raise404 = raise404 and not kb.ignoreNotFound

    # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
    # support those by default
    url = asciifyUrl(url)

    # fix for known issues when using url in unicode format
    # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
    url = unicodeencode(url)

    try:
        socket.setdefaulttimeout(timeout)

        if direct_:
            if '?' in url:
                url, params = url.split('?', 1)
                params = urlencode(params)
                url = "%s?%s" % (url, params)
                requestMsg += "?%s" % params

        elif multipart:
            # Needed in this form because of potential circle dependency
            # problem (option -> update -> connect -> option)
            from lib.core.option import proxyHandler

            multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
            conn = multipartOpener.open(unicodeencode(url), multipart)
            page = Connect._connReadProxy(conn) if not skipRead else None
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

            return page

        elif any((refreshing, crawling)):
            pass

        elif target:
            if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
                # NOTE: inline (?i) flag used here because the fourth positional
                # argument of re.sub() is count, not flags
                url = re.sub(r"(?i)\Ahttp:", "https:", url)
                url = re.sub(r"(?i):80/", ":443/", url)

            if PLACE.GET in conf.parameters and not get:
                get = conf.parameters[PLACE.GET]

                if not conf.skipUrlEncode:
                    get = urlencode(get, limit=True)

            if get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            if PLACE.POST in conf.parameters and not post and method in (None, HTTPMETHOD.POST):
                post = conf.parameters[PLACE.POST]

        elif get:
            url = "%s?%s" % (url, get)
            requestMsg += "?%s" % get

        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

        # Prepare HTTP headers
        headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer})

        if kb.authHeader:
            headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

        if kb.proxyAuthHeader:
            headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

        headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
        headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
        headers[HTTP_HEADER.HOST] = host or getHostHeader(url)

        if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
            headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

        if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
            warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
            warnMsg += "Will try to reconstruct"
            singleTimeWarnMessage(warnMsg)

            boundary = findMultipartPostBoundary(conf.data)
            if boundary:
                headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)

        if auxHeaders:
            for key, item in auxHeaders.items():
                headers[key] = item

        for key, item in headers.items():
            del headers[key]
            headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

        post = unicodeencode(post, kb.pageEncoding)

        if method:
            req = MethodRequest(url, post, headers)
            req.set_method(method)
        else:
            req = urllib2.Request(url, post, headers)

        requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

        if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
            conf.cj._policy._now = conf.cj._now = int(time.time())
            cookies = conf.cj._cookies_for_request(req)
            requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

        if post is not None:
            if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

        if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
            requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

        requestMsg += "\n%s" % requestHeaders

        if post is not None:
            requestMsg += "\n\n%s" % getUnicode(post)

        requestMsg += "\n"

        threadData.lastRequestMsg = requestMsg

        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        conn = urllib2.urlopen(req)

        if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and conf.authType == AUTH_TYPE.BASIC:
            kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

        if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
            kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

        # Return response object
        if response:
            return conn, None, None

        # Get HTTP response
        if hasattr(conn, 'redurl'):
            page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
            skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
            code = conn.redcode
        else:
            page = Connect._connReadProxy(conn) if not skipRead else None

        code = code or conn.code
        responseHeaders = conn.info()
        responseHeaders[URI_HTTP_HEADER] = conn.geturl()
        page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
        status = getUnicode(conn.msg)

        if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
            url = extractRegexResult(META_REFRESH_REGEX, page)

            debugMsg = "got HTML meta refresh header"
            logger.debug(debugMsg)

            if kb.alwaysRefresh is None:
                msg = "sqlmap got a refresh request "
                msg += "(redirect like response common to login pages). "
                msg += "Do you want to apply the refresh "
                msg += "from now on (or stay on the original page)? [Y/n]"
                choice = readInput(msg, default="Y")

                kb.alwaysRefresh = choice not in ("n", "N")

            if kb.alwaysRefresh:
                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                kwargs['refreshing'] = True
                kwargs['get'] = None
                kwargs['post'] = None

                try:
                    return Connect._getPageProxy(**kwargs)
                except SqlmapSyntaxException:
                    pass

        # Explicit closing of connection object
        if not conf.keepAlive:
            try:
                if hasattr(conn.fp, '_sock'):
                    conn.fp._sock.close()
                conn.close()
            except Exception, msg:
                warnMsg = "problem occurred during connection closing ('%s')" % msg
                logger.warn(warnMsg)

    except urllib2.HTTPError, e:
        page = None
        responseHeaders = None

        try:
            page = e.read() if not skipRead else None
            responseHeaders = e.info()
            responseHeaders[URI_HTTP_HEADER] = e.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
        except socket.timeout:
            warnMsg = "connection timed out while trying "
            warnMsg += "to get error page information (%d)" % e.code
            logger.warn(warnMsg)
            return None, None, None
        except KeyboardInterrupt:
            raise
        except:
            pass
        finally:
            page = page if isinstance(page, unicode) else getUnicode(page)

        code = e.code
        threadData.lastHTTPError = (threadData.lastRequestUID, code)

        kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

        status = getUnicode(e.msg)
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

        skipLogTraffic = True

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

        if e.code == httplib.UNAUTHORIZED:
            errMsg = "not authorized, try to provide right HTTP "
            errMsg += "authentication type and valid credentials (%d)" % code
            raise SqlmapConnectionException(errMsg)
        elif e.code == httplib.NOT_FOUND:
            if raise404:
                errMsg = "page not found (%d)" % code
                raise SqlmapConnectionException(errMsg)
            else:
                debugMsg = "page not found (%d)" % code
                singleTimeLogMessage(debugMsg, logging.DEBUG)
                processResponse(page, responseHeaders)
        elif e.code == httplib.GATEWAY_TIMEOUT:
            if ignoreTimeout:
                return None, None, None
            else:
                warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code])

                if threadData.retriesCount < conf.retries and not kb.threadException:
                    warnMsg += ". sqlmap is going to retry the request"
                    logger.critical(warnMsg)
                    return Connect._retryProxy(**kwargs)
                elif kb.testMode:
                    logger.critical(warnMsg)
                    return None, None, None
                else:
                    raise SqlmapConnectionException(warnMsg)
        else:
            debugMsg = "got HTTP error code: %d (%s)" % (code, status)
            logger.debug(debugMsg)
def getPage(**kwargs):
    """
    This method connects to the target URL or proxy and returns
    the target URL page content
    """

    if isinstance(conf.delay, (int, float)) and conf.delay > 0:
        time.sleep(conf.delay)
    elif conf.cpuThrottle:
        cpuThrottle(conf.cpuThrottle)

    if conf.dummy:
        return randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())

    threadData = getCurrentThreadData()

    with kb.locks.request:
        kb.requestCounter += 1
        threadData.lastRequestUID = kb.requestCounter

    url = kwargs.get("url", None) or conf.url
    get = kwargs.get("get", None)
    post = kwargs.get("post", None)
    method = kwargs.get("method", None)
    cookie = kwargs.get("cookie", None)
    ua = kwargs.get("ua", None) or conf.agent
    referer = kwargs.get("referer", None) or conf.referer
    host = kwargs.get("host", None) or conf.host
    direct_ = kwargs.get("direct", False)
    multipart = kwargs.get("multipart", False)
    silent = kwargs.get("silent", False)
    raise404 = kwargs.get("raise404", True)
    timeout = kwargs.get("timeout", None) or conf.timeout
    auxHeaders = kwargs.get("auxHeaders", None)
    response = kwargs.get("response", False)
    ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
    refreshing = kwargs.get("refreshing", False)
    retrying = kwargs.get("retrying", False)
    crawling = kwargs.get("crawling", False)
    skipRead = kwargs.get("skipRead", False)

    if not urlparse.urlsplit(url).netloc:
        url = urlparse.urljoin(conf.url, url)

    # flag to know if we are dealing with the same target host
    target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

    if not retrying:
        # Reset the number of connection retries
        threadData.retriesCount = 0

    # fix for known issue when urllib2 just skips the other part of provided
    # url split with space char while urlencoding it in the later phase
    url = url.replace(" ", "%20")

    conn = None
    code = None
    page = None

    _ = urlparse.urlsplit(url)
    requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
    requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
    responseMsg = u"HTTP response "
    requestHeaders = u""
    responseHeaders = None
    logHeaders = u""
    skipLogTraffic = False

    raise404 = raise404 and not kb.ignoreNotFound

    # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
    # support those by default
    url = asciifyUrl(url)

    try:
        socket.setdefaulttimeout(timeout)

        if direct_:
            if '?' in url:
                url, params = url.split('?', 1)
                params = urlencode(params)
                url = "%s?%s" % (url, params)

        elif multipart:
            # Needed in this form because of potential circle dependency
            # problem (option -> update -> connect -> option)
            from lib.core.option import proxyHandler

            multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
            conn = multipartOpener.open(unicodeencode(url), multipart)
            page = Connect._connReadProxy(conn) if not skipRead else None
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))

            return page

        elif any((refreshing, crawling)):
            pass

        elif target:
            if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
                # NOTE: inline (?i) flag used here because the fourth positional
                # argument of re.sub() is count, not flags
                url = re.sub(r"(?i)\Ahttp:", "https:", url)
                url = re.sub(r"(?i):80/", ":443/", url)

            if PLACE.GET in conf.parameters and not get:
                get = conf.parameters[PLACE.GET]

                if not conf.skipUrlEncode:
                    get = urlencode(get, limit=True)

            if get:
                if '?' in url:
                    url = "%s%s%s" % (url, DEFAULT_GET_POST_DELIMITER, get)
                    requestMsg += "%s%s" % (DEFAULT_GET_POST_DELIMITER, get)
                else:
                    url = "%s?%s" % (url, get)
                    requestMsg += "?%s" % get

            if PLACE.POST in conf.parameters and not post and method != HTTPMETHOD.GET:
                post = conf.parameters[PLACE.POST]

        elif get:
            url = "%s?%s" % (url, get)
            requestMsg += "?%s" % get

        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

        # Prepare HTTP headers
        headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host})

        if kb.authHeader:
            headers[HTTP_HEADER.AUTHORIZATION] = kb.authHeader

        if kb.proxyAuthHeader:
            headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

        if HTTP_HEADER.ACCEPT not in headers:
            headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

        if HTTP_HEADER.HOST not in headers:
            headers[HTTP_HEADER.HOST] = getHostHeader(url)

        if HTTP_HEADER.ACCEPT_ENCODING not in headers:
            headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"

        if post is not None and HTTP_HEADER.CONTENT_TYPE not in headers:
            headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

        if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
            warnMsg = "missing 'boundary parameter' in '%s' header. " % HTTP_HEADER.CONTENT_TYPE
            warnMsg += "Will try to reconstruct"
            singleTimeWarnMessage(warnMsg)

            boundary = findMultipartPostBoundary(conf.data)
            if boundary:
                headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)

        if auxHeaders:
            for key, item in auxHeaders.items():
                for _ in headers.keys():
                    if _.upper() == key.upper():
                        del headers[_]
                headers[key] = item

        for key, item in headers.items():
            del headers[key]
            headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

        url = unicodeencode(url)
        post = unicodeencode(post, kb.pageEncoding)

        if method and method not in (HTTPMETHOD.GET, HTTPMETHOD.POST):
            method = unicodeencode(method)
            req = MethodRequest(url, post, headers)
            req.set_method(method)
        else:
            req = urllib2.Request(url, post, headers)

        requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())

        if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
            conf.cj._policy._now = conf.cj._now = int(time.time())
            cookies = conf.cj._cookies_for_request(req)
            requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

        if post is not None:
            if not getRequestHeader(req, HTTP_HEADER.CONTENT_LENGTH):
                requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))

        if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
            requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION

        requestMsg += "\n%s" % requestHeaders

        if post is not None:
            requestMsg += "\n\n%s" % getUnicode(post)

        requestMsg += "\n"

        threadData.lastRequestMsg = requestMsg

        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        conn = urllib2.urlopen(req)

        if not kb.authHeader and getRequestHeader(req, HTTP_HEADER.AUTHORIZATION) and (conf.authType or "").lower() == AUTH_TYPE.BASIC.lower():
            kb.authHeader = getRequestHeader(req, HTTP_HEADER.AUTHORIZATION)

        if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
            kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)

        # Return response object
        if response:
            return conn, None, None

        # Get HTTP response
        if hasattr(conn, 'redurl'):
            page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
            skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
            code = conn.redcode
        else:
            page = Connect._connReadProxy(conn) if not skipRead else None

        code = code or conn.code
        responseHeaders = conn.info()
        responseHeaders[URI_HTTP_HEADER] = conn.geturl()
        page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
        status = getUnicode(conn.msg)

        if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
            url = extractRegexResult(META_REFRESH_REGEX, page)

            debugMsg = "got HTML meta refresh header"
            logger.debug(debugMsg)

            if kb.alwaysRefresh is None:
                msg = "sqlmap got a refresh request "
                msg += "(redirect like response common to login pages). "
                msg += "Do you want to apply the refresh "
                msg += "from now on (or stay on the original page)? [Y/n]"
                choice = readInput(msg, default="Y")

                kb.alwaysRefresh = choice not in ("n", "N")

            if kb.alwaysRefresh:
                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                kwargs['refreshing'] = True
                kwargs['get'] = None
                kwargs['post'] = None

                try:
                    return Connect._getPageProxy(**kwargs)
                except SqlmapSyntaxException:
                    pass

        # Explicit closing of connection object
        if not conf.keepAlive:
            try:
                if hasattr(conn.fp, '_sock'):
                    conn.fp._sock.close()
                conn.close()
            except Exception, msg:
                warnMsg = "problem occurred during connection closing ('%s')" % msg
                logger.warn(warnMsg)

    except urllib2.HTTPError, e:
        page = None
        responseHeaders = None

        try:
            page = e.read() if not skipRead else None
            responseHeaders = e.info()
            responseHeaders[URI_HTTP_HEADER] = e.geturl()
            page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
        except socket.timeout:
            warnMsg = "connection timed out while trying "
            warnMsg += "to get error page information (%d)" % e.code
            logger.warn(warnMsg)
            return None, None, None
        except KeyboardInterrupt:
            raise
        except:
            pass
        finally:
            page = page if isinstance(page, unicode) else getUnicode(page)

        code = e.code
        kb.originalCode = kb.originalCode or code
        threadData.lastHTTPError = (threadData.lastRequestUID, code)

        kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

        status = getUnicode(e.msg)
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

        skipLogTraffic = True

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

        if e.code == httplib.UNAUTHORIZED and not conf.ignore401:
            errMsg = "not authorized, try to provide right HTTP "
            errMsg += "authentication type and valid credentials (%d)" % code
            raise SqlmapConnectionException(errMsg)
        elif e.code == httplib.NOT_FOUND:
            if raise404:
                errMsg = "page not found (%d)" % code
                raise SqlmapConnectionException(errMsg)
            else:
                debugMsg = "page not found (%d)" % code
                singleTimeLogMessage(debugMsg, logging.DEBUG)
                processResponse(page, responseHeaders)
        elif e.code == httplib.GATEWAY_TIMEOUT:
            if ignoreTimeout:
                return None, None, None
            else:
                warnMsg = "unable to connect to the target URL (%d - %s)" % (e.code, httplib.responses[e.code])

                if threadData.retriesCount < conf.retries and not kb.threadException:
                    warnMsg += ". sqlmap is going to retry the request"
                    logger.critical(warnMsg)
                    return Connect._retryProxy(**kwargs)
                elif kb.testMode:
                    logger.critical(warnMsg)
                    return None, None, None
                else:
                    raise SqlmapConnectionException(warnMsg)
        else:
            debugMsg = "got HTTP error code: %d (%s)" % (code, status)
            logger.debug(debugMsg)
def getPage(**kwargs):
    """
    This method connects to the target url or proxy and returns
    the target url page content
    """

    if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
        time.sleep(conf.delay)
    elif conf.cpuThrottle:
        cpuThrottle(conf.cpuThrottle)

    threadData = getCurrentThreadData()
    threadData.lastRequestUID += 1

    url = kwargs.get('url', conf.url)
    get = kwargs.get('get', None)
    post = kwargs.get('post', None)
    method = kwargs.get('method', None)
    cookie = kwargs.get('cookie', None)
    ua = kwargs.get('ua', None)
    referer = kwargs.get('referer', None)
    host = kwargs.get('host', conf.host)
    direct = kwargs.get('direct', False)
    multipart = kwargs.get('multipart', False)
    silent = kwargs.get('silent', False)
    raise404 = kwargs.get('raise404', True)
    auxHeaders = kwargs.get('auxHeaders', None)
    response = kwargs.get('response', False)
    ignoreTimeout = kwargs.get('ignoreTimeout', kb.ignoreTimeout)
    refreshing = kwargs.get('refreshing', False)
    retrying = kwargs.get('retrying', False)
    crawling = kwargs.get('crawling', False)

    if not urlparse.urlsplit(url).netloc:
        url = urlparse.urljoin(conf.url, url)

    # flag to know if we are dealing with the same target host
    target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))

    if not retrying:
        # Reset the number of connection retries
        threadData.retriesCount = 0

    # fix for known issue when urllib2 just skips the other part of provided
    # url split with space char while urlencoding it in the later phase
    url = url.replace(" ", "%20")

    code = None
    page = None

    requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post else HTTPMETHOD.GET))
    requestMsg += ("%s" % urlparse.urlsplit(url)[2] or "/") if not any((refreshing, crawling)) else url
    responseMsg = u"HTTP response "
    requestHeaders = u""
    responseHeaders = None
    logHeaders = u""
    skipLogTraffic = False

    raise404 = raise404 and not kb.ignoreNotFound

    # support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
    # support those by default
    url = asciifyUrl(url)

    # fix for known issues when using url in unicode format
    # (e.g. UnicodeDecodeError: "url = url + '?' + query" in redirect case)
    url = unicodeencode(url)

    try:
        if silent:
            socket.setdefaulttimeout(HTTP_SILENT_TIMEOUT)
        else:
            socket.setdefaulttimeout(conf.timeout)

        if direct:
            if "?" in url:
                url, params = url.split("?")
                params = urlencode(params)
                url = "%s?%s" % (url, params)
                requestMsg += "?%s" % params

        elif multipart:
            # Needed in this form because of potential circle dependency
            # problem (option -> update -> connect -> option)
            from lib.core.option import proxyHandler

            multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
            conn = multipartOpener.open(unicodeencode(url), multipart)
            page = Connect.__connReadProxy(conn)
            responseHeaders = conn.info()
            responseHeaders[URI_HTTP_HEADER] = conn.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))

            return page

        elif any((refreshing, crawling)):
            pass

        elif target:
            if PLACE.GET in conf.parameters and not get:
                get = conf.parameters[PLACE.GET]

            if get:
                url = "%s?%s" % (url, get)
                requestMsg += "?%s" % get

            if conf.method == HTTPMETHOD.POST and not post:
                for place in (PLACE.POST, PLACE.SOAP):
                    if place in conf.parameters:
                        post = conf.parameters[place]
                        break

        elif get:
            url = "%s?%s" % (url, get)
            requestMsg += "?%s" % get

        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

        # Prepare HTTP headers
        headers = forgeHeaders({HTTPHEADER.COOKIE: cookie, HTTPHEADER.USER_AGENT: ua, HTTPHEADER.REFERER: referer})

        if kb.authHeader:
            headers[HTTPHEADER.AUTHORIZATION] = kb.authHeader

        if kb.proxyAuthHeader:
            headers[HTTPHEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

        headers[HTTPHEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
        headers[HTTPHEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if method != HTTPMETHOD.HEAD else "identity"
        headers[HTTPHEADER.HOST] = host or getHostHeader(url)

        if auxHeaders:
            for key, item in auxHeaders.items():
                headers[key] = item

        for key, item in headers.items():
            del headers[key]
            headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(item, kb.pageEncoding)

        post = unicodeencode(post, kb.pageEncoding)

        if method:
            req = MethodRequest(url, post, headers)
            req.set_method(method)
        else:
            req = urllib2.Request(url, post, headers)

        requestHeaders += "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in req.header_items())

        if not getRequestHeader(req, HTTPHEADER.COOKIE) and conf.cj:
            conf.cj._policy._now = conf.cj._now = int(time.time())
            cookies = conf.cj._cookies_for_request(req)
            requestHeaders += "\n%s" % ("Cookie: %s" % ";".join("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value)) for cookie in cookies))

        if post:
            if not getRequestHeader(req, HTTPHEADER.CONTENT_TYPE):
                requestHeaders += "\n%s: %s" % (string.capwords(HTTPHEADER.CONTENT_TYPE), "application/x-www-form-urlencoded")

            if not getRequestHeader(req, HTTPHEADER.CONTENT_LENGTH):
                requestHeaders += "\n%s: %d" % (string.capwords(HTTPHEADER.CONTENT_LENGTH), len(post))

        if not getRequestHeader(req, HTTPHEADER.CONNECTION):
            requestHeaders += "\n%s: close" % HTTPHEADER.CONNECTION

        requestMsg += "\n%s" % requestHeaders

        if post:
            requestMsg += "\n\n%s" % getUnicode(post)

        requestMsg += "\n"

        threadData.lastRequestMsg = requestMsg

        logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)

        conn = urllib2.urlopen(req)

        if not kb.authHeader and getRequestHeader(req, HTTPHEADER.AUTHORIZATION):
            kb.authHeader = getRequestHeader(req, HTTPHEADER.AUTHORIZATION)

        if not kb.proxyAuthHeader and getRequestHeader(req, HTTPHEADER.PROXY_AUTHORIZATION):
            kb.proxyAuthHeader = getRequestHeader(req, HTTPHEADER.PROXY_AUTHORIZATION)

        # Return response object
        if response:
            return conn, None, None

        # Get HTTP response
        if hasattr(conn, 'redurl'):
            page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect.__connReadProxy(conn)
            skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
            code = conn.redcode
        else:
            page = Connect.__connReadProxy(conn)

        code = code or conn.code
        responseHeaders = conn.info()
        responseHeaders[URI_HTTP_HEADER] = conn.geturl()
        page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
        status = getUnicode(conn.msg)

        if extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE) and not refreshing:
            url = extractRegexResult(META_REFRESH_REGEX, page, re.DOTALL | re.IGNORECASE)

            debugMsg = "got HTML meta refresh header"
            logger.debug(debugMsg)

            if kb.alwaysRefresh is None:
                msg = "sqlmap got a refresh request "
                msg += "(redirect like response common to login pages). "
                msg += "Do you want to apply the refresh "
                msg += "from now on (or stay on the original page)? [Y/n]"
                choice = readInput(msg, default="Y")

                kb.alwaysRefresh = choice not in ("n", "N")

            if kb.alwaysRefresh:
                if url.lower().startswith('http://'):
                    kwargs['url'] = url
                else:
                    kwargs['url'] = conf.url[:conf.url.rfind('/') + 1] + url

                threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
                kwargs['refreshing'] = True
                kwargs['get'] = None
                kwargs['post'] = None

                try:
                    return Connect.__getPageProxy(**kwargs)
                except sqlmapSyntaxException:
                    pass

        # Explicit closing of connection object
        if not conf.keepAlive:
            try:
                if hasattr(conn.fp, '_sock'):
                    conn.fp._sock.close()
                conn.close()
            except Exception, msg:
                warnMsg = "problem occurred during connection closing ('%s')" % msg
                logger.warn(warnMsg)

    except urllib2.HTTPError, e:
        page = None
        responseHeaders = None

        try:
            page = e.read()
            responseHeaders = e.info()
            responseHeaders[URI_HTTP_HEADER] = e.geturl()
            page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
        except socket.timeout:
            warnMsg = "connection timed out while trying "
            warnMsg += "to get error page information (%d)" % e.code
            logger.warn(warnMsg)
            return None, None, None
        except KeyboardInterrupt:
            raise
        except:
            pass
        finally:
            page = page if isinstance(page, unicode) else getUnicode(page)

        code = e.code
        threadData.lastHTTPError = (threadData.lastRequestUID, code)

        kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

        status = getUnicode(e.msg)
        responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)

        if responseHeaders:
            logHeaders = "\n".join("%s: %s" % (key.capitalize() if isinstance(key, basestring) else key, getUnicode(value)) for (key, value) in responseHeaders.items())

        logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))

        skipLogTraffic = True

        if conf.verbose <= 5:
            responseMsg += getUnicode(logHeaders)
        elif conf.verbose > 5:
            responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])

        logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

        if e.code == httplib.UNAUTHORIZED:
            errMsg = "not authorized, try to provide right HTTP "
            errMsg += "authentication type and valid credentials (%d)" % code
            raise sqlmapConnectionException, errMsg
        elif e.code == httplib.NOT_FOUND:
            if raise404:
                errMsg = "page not found (%d)" % code
                raise sqlmapConnectionException, errMsg
            else:
                debugMsg = "page not found (%d)" % code
                logger.debug(debugMsg)
                processResponse(page, responseHeaders)
        elif e.code == httplib.GATEWAY_TIMEOUT:
            if ignoreTimeout:
                return None, None, None
            else:
                warnMsg = "unable to connect to the target url (%d - %s)" % (e.code, httplib.responses[e.code])

                if threadData.retriesCount < conf.retries and not kb.threadException:
                    warnMsg += ", sqlmap is going to retry the request"
                    logger.critical(warnMsg)
                    return Connect.__retryProxy(**kwargs)
                elif kb.testMode:
                    logger.critical(warnMsg)
                    return None, None, None
                else:
                    raise sqlmapConnectionException, warnMsg
        else:
            debugMsg = "got HTTP error code: %d (%s)" % (code, status)
            logger.debug(debugMsg)
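# ---------------------------------------------------------------------------
# asciifyUrl() above compensates for urllib2's lack of support for non-Latin
# (e.g. cyrillic) URLs. A rough stdlib-only approximation of the idea (an
# assumption about intent, not sqlmap's actual implementation; port and
# credentials are ignored for brevity):
import urllib
import urlparse

def asciifyUrlSketch(url):
    parts = urlparse.urlsplit(url)
    host = parts.hostname.encode("idna") if parts.hostname else ""  # punycode the hostname
    path = urllib.quote(parts.path.encode("utf8"), safe="/%")       # percent-encode the path
    return urlparse.urlunsplit((parts.scheme, host, path, parts.query, parts.fragment))

# u"http://пример.испытание/страница", written with escapes for portability
print(asciifyUrlSketch(u"http://\u043f\u0440\u0438\u043c\u0435\u0440.\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435/\u0441\u0442\u0440\u0430\u043d\u0438\u0446\u0430"))
# http://xn--e1afmkfd.xn--80akhbyknj4f/%D1%81%D1%82%D1%80%D0%B0%D0%BD%D0%B8%D1%86%D0%B0
# ---------------------------------------------------------------------------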
errMsg += "that you have sufficient write permissions to " errMsg += "create temporary files and/or directories" raise SqlmapSystemException(errMsg) warnMsg = "unable to create dump directory " warnMsg += "'%s' (%s). " % (dumpDbPath, ex) warnMsg += "Using temporary directory '%s' instead" % tempDir logger.warn(warnMsg) dumpDbPath = tempDir _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table))) if len(_) < len(table) or IS_WIN and table.upper( ) in WINDOWS_RESERVED_NAMES: _ = unicodeencode( re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table))) dumpFileName = os.path.join( dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower())) warnFile = True else: dumpFileName = os.path.join( dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower())) appendToFile = os.path.isfile(dumpFileName) and any( (conf.limitStart, conf.limitStop)) dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab") count = int(tableValues["__infos__"]["count"]) separator = str()
        logger.warn(warnMsg)

        dumpDbPath = tempDir

dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))
if not checkFile(dumpFileName, False):
    try:
        openFile(dumpFileName, "w+b").close()
    except SqlmapSystemException:
        raise
    except:
        warnFile = True

        _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
        if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
            _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table)))
            dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
        else:
            dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
else:
    appendToFile = any((conf.limitStart, conf.limitStop))

    if not appendToFile:
        count = 1
        while True:
            candidate = "%s.%d" % (dumpFileName, count)
            if not checkFile(candidate, False):
                try:
                    shutil.copyfile(dumpFileName, candidate)
                except IOError:
                    pass
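# ---------------------------------------------------------------------------
# Standalone sketch of the backup rotation in the (truncated) fragment above:
# before an existing dump file is overwritten, its current content is
# preserved as "<name>.<N>" for the first N still free. The function name is
# illustrative; the complete loop appears in the full variant further below.
import os
import shutil

def backupExisting(filename):
    if not os.path.isfile(filename):
        return None
    count = 1
    while True:
        candidate = "%s.%d" % (filename, count)
        if not os.path.isfile(candidate):
            shutil.copyfile(filename, candidate)
            return candidate
        count += 1
# ---------------------------------------------------------------------------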
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False
    warnFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if conf.api:
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    dumpDbPath = os.path.join(conf.dumpPath, unsafeSQLIdentificatorNaming(db))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            try:
                os.makedirs(dumpDbPath)
            except:
                warnFile = True

                _ = unicodeencode(re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(db)))
                dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))

                if not os.path.isdir(dumpDbPath):
                    try:
                        os.makedirs(dumpDbPath)
                    except Exception as ex:
                        try:
                            tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
                        except IOError as _:
                            errMsg = "unable to write to the temporary directory ('%s'). " % _
                            errMsg += "Please make sure that your disk is not full and "
                            errMsg += "that you have sufficient write permissions to "
                            errMsg += "create temporary files and/or directories"
                            raise SqlmapSystemException(errMsg)

                        warnMsg = "unable to create dump directory "
                        warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex))
                        warnMsg += "Using temporary directory '%s' instead" % tempDir
                        logger.warn(warnMsg)

                        dumpDbPath = tempDir

        dumpFileName = os.path.join(dumpDbPath, re.sub(r'[\\/]', UNSAFE_DUMP_FILEPATH_REPLACEMENT, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())))
        if not checkFile(dumpFileName, False):
            try:
                openFile(dumpFileName, "w+b").close()
            except SqlmapSystemException:
                raise
            except:
                warnFile = True

                _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
                if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
                    _ = unicodeencode(re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(table)))
                    dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
                else:
                    dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
        else:
            appendToFile = any((conf.limitStart, conf.limitStop))

            if not appendToFile:
                count = 1
                while True:
                    candidate = "%s.%d" % (dumpFileName, count)
                    if not checkFile(candidate, False):
                        try:
                            shutil.copyfile(dumpFileName, candidate)
                        except IOError:
                            pass
                        finally:
                            break
                    else:
                        count += 1

        dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab", buffering=DUMP_FILE_BUFFER_SIZE)

    count = int(tableValues["__infos__"]["count"])
    separator = str()
    field = 1
    fields = len(tableValues) - 1

    columns = prioritySortColumns(tableValues.keys())

    if conf.col:
        cols = conf.col.split(',')
        columns = sorted(columns, key=lambda _: cols.index(_) if _ in cols else 0)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]
            lines = "-" * (int(info["length"]) + 2)
            separator += "+%s" % lines

    separator += "+"
    self._write("Database: %s\nTable: %s" % (unsafeSQLIdentificatorNaming(db) if db else "Current database", unsafeSQLIdentificatorNaming(table)))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        cols = []

        for column in columns:
            if column != "__infos__":
                colType = Replication.INTEGER

                for value in tableValues[column]['values']:
                    try:
                        if not value or value == " ":  # NULL
                            continue

                        int(value)
                    except ValueError:
                        colType = None
                        break

                if colType is None:
                    colType = Replication.REAL

                    for value in tableValues[column]['values']:
                        try:
                            if not value or value == " ":  # NULL
                                continue

                            float(value)
                        except ValueError:
                            colType = None
                            break

                cols.append((unsafeSQLIdentificatorNaming(column), colType if colType else Replication.TEXT))

        rtable = replication.createTable(table, cols)
    elif conf.dumpFormat == DUMP_FORMAT.HTML:
        dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
        dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
        dataToDumpFile(dumpFP, "<meta name=\"generator\" content=\"%s\" />\n" % VERSION_STRING)
        dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
        dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
        dataToDumpFile(dumpFP, "\n</head>\n<body>\n<table>\n<thead>\n<tr>\n")

    if count == 1:
        self._write("[1 entry]")
    else:
        self._write("[%d entries]" % count)

    self._write(separator)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]
            column = unsafeSQLIdentificatorNaming(column)
            maxlength = int(info["length"])
            blank = " " * (maxlength - len(column))

            self._write("| %s%s" % (column, blank), newline=False)

            if not appendToFile:
                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(column))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(column), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    dataToDumpFile(dumpFP, "<th>%s</th>" % cgi.escape(column).encode("ascii", "xmlcharrefreplace"))

            field += 1

    if conf.dumpFormat == DUMP_FORMAT.HTML:
        dataToDumpFile(dumpFP, "\n</tr>\n</thead>\n<tbody>\n")

    self._write("|\n%s" % separator)

    if conf.dumpFormat == DUMP_FORMAT.CSV:
        dataToDumpFile(dumpFP, "\n" if not appendToFile else "")
    elif conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.beginTransaction()

    if count > TRIM_STDOUT_DUMP_SIZE:
        warnMsg = "console output will be trimmed to "
        warnMsg += "last %d rows due to " % TRIM_STDOUT_DUMP_SIZE
        warnMsg += "large table size"
        logger.warning(warnMsg)

    for i in xrange(count):
        console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
        field = 1
        values = []

        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "<tr>")

        for column in columns:
            if column != "__infos__":
                info = tableValues[column]

                if len(info["values"]) <= i:
                    continue

                if info["values"][i] is None:
                    value = u''
                else:
                    value = getUnicode(info["values"][i])

                value = DUMP_REPLACEMENTS.get(value, value)
                values.append(value)
                maxlength = int(info["length"])
                blank = " " * (maxlength - len(value))
                self._write("| %s%s" % (value, blank), newline=False, console=console)

                if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value:
                    try:
                        mimetype = magic.from_buffer(value, mime=True)
                        if any(mimetype.startswith(_) for _ in ("application", "image")):
                            if not os.path.isdir(dumpDbPath):
                                os.makedirs(dumpDbPath)

                            _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(column)))
                            filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (_, randomInt(8)))
                            warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
                            logger.warn(warnMsg)

                            with open(filepath, "wb") as f:
                                _ = safechardecode(value, True)
                                f.write(_)
                    except magic.MagicException as ex:
                        logger.debug(getSafeExString(ex))

                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(value))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    dataToDumpFile(dumpFP, "<td>%s</td>" % cgi.escape(value).encode("ascii", "xmlcharrefreplace"))

                field += 1

        if conf.dumpFormat == DUMP_FORMAT.SQLITE:
            try:
                rtable.insert(values)
            except SqlmapValueException:
                pass
        elif conf.dumpFormat == DUMP_FORMAT.CSV:
            dataToDumpFile(dumpFP, "\n")
        elif conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "</tr>\n")

        self._write("|", console=console)

    self._write("%s\n" % separator)

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.endTransaction()
        logger.info("table '%s.%s' dumped to sqlite3 database '%s'" % (db, table, replication.dbpath))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "</tbody>\n</table>\n</body>\n</html>")
        else:
            dataToDumpFile(dumpFP, "\n")

        dumpFP.close()

        msg = "table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName)

        if not warnFile:
            logger.info(msg)
        else:
            logger.warn(msg)
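# ---------------------------------------------------------------------------
# The SQLITE branch above infers each column's affinity by trying INTEGER
# first, then REAL, then falling back to TEXT. The same idea as a standalone
# helper (string constants stand in for Replication.INTEGER/REAL/TEXT):
def inferColumnType(values):
    for colType, cast in (("INTEGER", int), ("REAL", float)):
        try:
            for value in values:
                if not value or value == " ":  # NULL marker, skipped as above
                    continue
                cast(value)
            return colType
        except ValueError:
            continue
    return "TEXT"

print(inferColumnType(["1", "2", " "]))  # INTEGER
print(inferColumnType(["1.5", "2"]))     # REAL
print(inferColumnType(["abc", "1"]))     # TEXT
# ---------------------------------------------------------------------------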
errMsg = "unable to write to the temporary directory ('%s'). " % _ errMsg += "Please make sure that your disk is not full and " errMsg += "that you have sufficient write permissions to " errMsg += "create temporary files and/or directories" raise SqlmapSystemException(errMsg) warnMsg = "unable to create dump directory " warnMsg += "'%s' (%s). " % (dumpDbPath, ex) warnMsg += "Using temporary directory '%s' instead" % tempDir logger.warn(warnMsg) dumpDbPath = tempDir _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table))) if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES: _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table))) dumpFileName = os.path.join( dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()), ) warnFile = True else: dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower())) appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop)) dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab") count = int(tableValues["__infos__"]["count"]) separator = str() field = 1 fields = len(tableValues) - 1
errMsg = "unable to write to the temporary directory ('%s'). " % _ errMsg += "Please make sure that your disk is not full and " errMsg += "that you have sufficient write permissions to " errMsg += "create temporary files and/or directories" raise SqlmapSystemException(errMsg) warnMsg = "unable to create dump directory " warnMsg += "'%s' (%s). " % (dumpDbPath, ex) warnMsg += "Using temporary directory '%s' instead" % tempDir logger.warn(warnMsg) dumpDbPath = tempDir _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table))) if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES: _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table))) dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower())) warnFile = True else: dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower())) appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop)) dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab") count = int(tableValues["__infos__"]["count"]) separator = str() field = 1 fields = len(tableValues) - 1 columns = prioritySortColumns(tableValues.keys())
errMsg = "unable to write to the temporary directory ('%s'). " % _ errMsg += "Please make sure that your disk is not full and " errMsg += "that you have sufficient write permissions to " errMsg += "create temporary files and/or directories" raise SqlmapSystemException(errMsg) warnMsg = "unable to create dump directory " warnMsg += "'%s' (%s). " % (dumpDbPath, ex) warnMsg += "Using temporary directory '%s' instead" % tempDir logger.warn(warnMsg) dumpDbPath = tempDir _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table))) if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES: _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table))) dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % ( _, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower())) warnFile = True else: dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower())) appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop)) dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab") count = int(tableValues["__infos__"]["count"]) separator = str() field = 1 fields = len(tableValues) - 1 columns = prioritySortColumns(tableValues.keys())