Example #1
def storeHashesToFile(attack_dict):
    if not attack_dict:
        return

    items = OrderedSet()

    for user, hashes in attack_dict.items():
        for hash_ in hashes:
            hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_
            if hash_ and hash_ != NULL and hashRecognition(hash_):
                item = None
                if user and not user.startswith(DUMMY_USER_PREFIX):
                    item = "%s:%s\n" % (user.encode(UNICODE_ENCODING), hash_.encode(UNICODE_ENCODING))
                else:
                    item = "%s\n" % hash_.encode(UNICODE_ENCODING)

                if item and item not in items:
                    items.add(item)

    if kb.storeHashesChoice is None:
        message = "do you want to store hashes to a temporary file "
        message += "for eventual further processing with other tools [y/N] "

        kb.storeHashesChoice = readInput(message, default='N', boolean=True)

    if items and kb.storeHashesChoice:
        handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.HASHES, suffix=".txt")
        os.close(handle)

        infoMsg = "writing hashes to a temporary file '%s' " % filename
        logger.info(infoMsg)

        with open(filename, "w+") as f:
            for item in items:
                f.write(item)
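
All of these snippets depend on sqlmap's OrderedSet, a set that also remembers insertion order, so deduplicated items (hashes, URLs, targets) come back out in the order they were first seen. A minimal sketch of such a structure using only the standard library (sqlmap ships its own implementation, which may differ in detail):

import collections.abc

class OrderedSet(collections.abc.MutableSet):
    """Set that iterates in insertion order (illustrative sketch only)."""

    def __init__(self, iterable=()):
        # dict keys preserve insertion order (Python 3.7+), giving ordering for free
        self._data = dict.fromkeys(iterable)

    def __contains__(self, value):
        return value in self._data

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def add(self, value):
        self._data[value] = None

    def discard(self, value):
        self._data.pop(value, None)

For example, list(OrderedSet(["b", "a", "b"])) yields ['b', 'a']: the duplicate is dropped and the first-seen order is kept, which is exactly what the file-writing loop above relies on.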
Example #2
def tamper(payload, **kwargs):
    """
    Adds multiple spaces (' ') around SQL keywords
    Notes:
        * Useful to bypass very weak and bespoke web application firewalls
          that have poorly written, overly permissive regular expressions
    Reference: https://www.owasp.org/images/7/74/Advanced_SQL_Injection.ppt
    >>> random.seed(0)
    >>> tamper('1 UNION SELECT foobar')
    '1     UNION     SELECT     foobar'
    """

    retVal = payload

    if payload:
        words = OrderedSet()

        for match in re.finditer(r"\b[A-Za-z_]+\b", payload):
            word = match.group()

            if word.upper() in kb.keywords:
                words.add(word)

        for word in words:
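            # keyword not used as a function name: pad it with random spaces on both sides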
            retVal = re.sub(r"(?<=\W)%s(?=[^A-Za-z_(]|\Z)" % word, "%s%s%s" % (' ' * random.randint(1, 4), word, ' ' * random.randint(1, 4)), retVal)
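            # keyword directly followed by '(': pad only in front, so the call syntax stays intact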
            retVal = re.sub(r"(?<=\W)%s(?=[(])" % word, "%s%s" % (' ' * random.randint(1, 4), word), retVal)

    return retVal
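
The two substitutions above treat keywords differently depending on what follows them: a keyword used as a function name (e.g. COUNT(*)) only gets padding in front, so the parenthesis stays attached. A standalone sketch of the same idea, with kb.keywords and OrderedSet (sqlmap internals) stubbed out for the demo:

import random
import re

KEYWORDS = {"UNION", "SELECT", "COUNT"}  # stand-in for kb.keywords

def pad_keywords(payload):
    result = payload
    # dict.fromkeys dedupes while keeping first-seen order, like OrderedSet above
    for word in dict.fromkeys(re.findall(r"\b[A-Za-z_]+\b", payload)):
        if word.upper() not in KEYWORDS:
            continue
        result = re.sub(r"(?<=\W)%s(?=[^A-Za-z_(]|\Z)" % word,
                        "%s%s%s" % (' ' * random.randint(1, 4), word, ' ' * random.randint(1, 4)), result)
        result = re.sub(r"(?<=\W)%s(?=[(])" % word,
                        "%s%s" % (' ' * random.randint(1, 4), word), result)
    return result

random.seed(0)
print(pad_keywords("1 UNION SELECT COUNT(*) FROM users"))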
Example #3
def parseSitemap(url, retVal=None):
    global abortedFlag

    if retVal is not None:
        logger.debug("parsing sitemap '%s'" % url)

    try:
        if retVal is None:
            abortedFlag = False
            retVal = OrderedSet()

        try:
            content = Request.getPage(
                url=url, raise404=True)[0] if not abortedFlag else ""
        except _http_client.InvalidURL:
            errMsg = "invalid URL given for sitemap ('%s')" % url
            raise SqlmapSyntaxException(errMsg)

        for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
            if abortedFlag:
                break
            url = match.group(1).strip()
            if url.endswith(".xml") and "sitemap" in url.lower():
                if kb.followSitemapRecursion is None:
                    message = "sitemap recursion detected. Do you want to follow? [y/N] "
                    kb.followSitemapRecursion = readInput(message,
                                                          default='N',
                                                          boolean=True)
                if kb.followSitemapRecursion:
                    parseSitemap(url, retVal)
            else:
                retVal.add(url)

    except KeyboardInterrupt:
        abortedFlag = True
        warnMsg = "user aborted during sitemap parsing. sqlmap "
        warnMsg += "will use partial list"
        logger.warn(warnMsg)

    return retVal
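
Outside of sqlmap, the same <loc> extraction can be reproduced with the standard library alone. A rough standalone sketch without the recursion prompt or abort handling (the target URL is hypothetical):

import re
from urllib.request import urlopen

def parse_sitemap(url, seen=None):
    """Collect URLs from a sitemap, descending into nested sitemap .xml files."""
    seen = seen if seen is not None else dict()  # dict keys keep first-seen order
    content = urlopen(url).read().decode("utf-8", "replace")
    for match in re.finditer(r"<loc>\s*([^<]+)", content):
        loc = match.group(1).strip()
        if loc.endswith(".xml") and "sitemap" in loc.lower():
            parse_sitemap(loc, seen)  # nested sitemap index: recurse
        else:
            seen[loc] = None
    return list(seen)

# urls = parse_sitemap("https://example.com/sitemap.xml")  # hypothetical target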
Example #4
def crawl(target):
    try:
        visited = set()
        threadData = getCurrentThreadData()
        threadData.shared.value = OrderedSet()
        threadData.shared.formsFound = False

        def crawlThread():
            threadData = getCurrentThreadData()

            while kb.threadContinue:
                with kb.locks.limit:
                    if threadData.shared.unprocessed:
                        current = threadData.shared.unprocessed.pop()
                        if current in visited:
                            continue
                        elif conf.crawlExclude and re.search(conf.crawlExclude, current):
                            dbgMsg = "skipping '%s'" % current
                            logger.debug(dbgMsg)
                            continue
                        else:
                            visited.add(current)
                    else:
                        break

                content = None
                try:
                    if current:
                        content = Request.getPage(url=current, crawling=True, raise404=False)[0]
                except SqlmapConnectionException as ex:
                    errMsg = "connection exception detected ('%s'). skipping " % getSafeExString(ex)
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)
                except SqlmapSyntaxException:
                    errMsg = "invalid URL detected. skipping '%s'" % current
                    logger.critical(errMsg)
                except _http_client.InvalidURL as ex:
                    errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString(ex)
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)

                if not kb.threadContinue:
                    break

                if isinstance(content, six.text_type):
                    try:
                        match = re.search(r"(?si)<html[^>]*>(.+)</html>", content)
                        if match:
                            content = "<html>%s</html>" % match.group(1)

                        soup = BeautifulSoup(content)
                        tags = soup('a')

                        tags += re.finditer(r'(?i)\s(href|src)=["\'](?P<href>[^>"\']+)', content)
                        tags += re.finditer(r'(?i)window\.open\(["\'](?P<href>[^)"\']+)["\']', content)

                        for tag in tags:
                            href = tag.get("href") if hasattr(tag, "get") else tag.group("href")

                            if href:
                                if threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID:
                                    current = threadData.lastRedirectURL[1]
                                url = _urllib.parse.urljoin(current, htmlUnescape(href))

                                # flag to know if we are dealing with the same target host
                                _ = checkSameHost(url, target)

                                if conf.scope:
                                    if not re.search(conf.scope, url, re.I):
                                        continue
                                elif not _:
                                    continue

                                if (extractRegexResult(r"\A[^?]+\.(?P<result>\w+)(\?|\Z)", url) or "").lower() not in CRAWL_EXCLUDE_EXTENSIONS:
                                    with kb.locks.value:
                                        threadData.shared.deeper.add(url)
                                        if re.search(r"(.*?)\?(.+)", url) and not re.search(r"\?(v=)?\d+\Z", url) and not re.search(r"(?i)\.(js|css)(\?|\Z)", url):
                                            threadData.shared.value.add(url)
                    except UnicodeEncodeError:  # for non-HTML files
                        pass
                    except ValueError:          # for non-valid links
                        pass
                    finally:
                        if conf.forms:
                            threadData.shared.formsFound |= len(findPageForms(content, current, False, True)) > 0

                if conf.verbose in (1, 2):
                    threadData.shared.count += 1
                    status = '%d/%d links visited (%d%%)' % (threadData.shared.count, threadData.shared.length, round(100.0 * threadData.shared.count / threadData.shared.length))
                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)

        threadData.shared.deeper = set()
        threadData.shared.unprocessed = set([target])

        if kb.checkSitemap is None:
            message = "do you want to check for the existence of "
            message += "site's sitemap(.xml) [y/N] "
            kb.checkSitemap = readInput(message, default='N', boolean=True)

        if kb.checkSitemap:
            found = True
            items = None
            url = _urllib.parse.urljoin(target, "/sitemap.xml")
            try:
                items = parseSitemap(url)
            except SqlmapConnectionException as ex:
                if "page not found" in getSafeExString(ex):
                    found = False
                    logger.warn("'sitemap.xml' not found")
            except:
                pass
            finally:
                if found:
                    if items:
                        for item in items:
                            if re.search(r"(.*?)\?(.+)", item):
                                threadData.shared.value.add(item)
                        if conf.crawlDepth > 1:
                            threadData.shared.unprocessed.update(items)
                    logger.info("%s links found" % ("no" if not items else len(items)))

        if not conf.bulkFile:
            infoMsg = "starting crawler for target URL '%s'" % target
            logger.info(infoMsg)

        for i in xrange(conf.crawlDepth):
            threadData.shared.count = 0
            threadData.shared.length = len(threadData.shared.unprocessed)
            numThreads = min(conf.threads, len(threadData.shared.unprocessed))

            if not conf.bulkFile:
                logger.info("searching for links with depth %d" % (i + 1))

            runThreads(numThreads, crawlThread, threadChoice=(i > 0))
            clearConsoleLine(True)

            if threadData.shared.deeper:
                threadData.shared.unprocessed = set(threadData.shared.deeper)
            else:
                break

    except KeyboardInterrupt:
        warnMsg = "user aborted during crawling. sqlmap "
        warnMsg += "will use partial list"
        logger.warn(warnMsg)

    finally:
        clearConsoleLine(True)

        if not threadData.shared.value:
            if not (conf.forms and threadData.shared.formsFound):
                warnMsg = "no usable links found (with GET parameters)"
                if conf.forms:
                    warnMsg += " or forms"
                logger.warn(warnMsg)
        else:
            for url in threadData.shared.value:
                kb.targets.add((urldecode(url, kb.pageEncoding), None, None, None, None))

        if kb.targets:
            if kb.normalizeCrawlingChoice is None:
                message = "do you want to normalize "
                message += "crawling results [Y/n] "

                kb.normalizeCrawlingChoice = readInput(message, default='Y', boolean=True)

            if kb.normalizeCrawlingChoice:
                seen = set()
                results = OrderedSet()

                for target in kb.targets:
                    if target[1] in (HTTPMETHOD.GET, None):
                        match = re.search(r"/[^/?]*\?.*\Z", target[0])
                        if match:
                            key = re.sub(r"=[^=&]*", "=", match.group(0)).strip('&')
                            if key not in seen:
                                results.add(target)
                                seen.add(key)
                    else:
                        results.add(target)

                kb.targets = results

            storeResultsToFile(kb.targets)
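
The normalization step at the end deduplicates GET targets by parameter names rather than values: the key keeps only the last path segment and the query string with every value blanked, so URLs differing only in parameter values collapse into one target. A small standalone demonstration of that key function, mirroring the regexes above:

import re

def normalization_key(url):
    """Reduce a URL to its last path segment plus parameter names only."""
    match = re.search(r"/[^/?]*\?.*\Z", url)
    if not match:
        return None
    return re.sub(r"=[^=&]*", "=", match.group(0)).strip('&')

print(normalization_key("http://host/item.php?id=1&cat=2"))  # /item.php?id=&cat=
print(normalization_key("http://host/item.php?id=9&cat=5"))  # same key, so deduplicated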
Example #5
def attackDumpedTable():
    if kb.data.dumpedTable:
        table = kb.data.dumpedTable
        columns = list(table.keys())
        count = table["__infos__"]["count"]

        if not count:
            return

        debugMsg = "analyzing table dump for possible password hashes"
        logger.debug(debugMsg)

        found = False
        col_user = ''
        col_passwords = set()
        attack_dict = {}
        binary_fields = OrderedSet()
        replacements = {}

        for column in sorted(columns, key=len, reverse=True):
            if column and column.lower() in COMMON_USER_COLUMNS:
                col_user = column
                break

        for column in columns:
            if column != "__infos__" and table[column]["values"]:
                if all(INVALID_UNICODE_CHAR_FORMAT.split('%')[0] in (value or "") for value in table[column]["values"]):
                    binary_fields.add(column)

        if binary_fields:
            _ = ','.join(binary_fields)
            warnMsg = "potential binary fields detected ('%s'). In case of any problems you are " % _
            warnMsg += "advised to rerun table dump with '--fresh-queries --binary-fields=\"%s\"'" % _
            logger.warn(warnMsg)

        for i in xrange(count):
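            # stop scanning rows early if nothing hash-like turned up within the threshold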
            if not found and i > HASH_RECOGNITION_QUIT_THRESHOLD:
                break

            for column in columns:
                if column == col_user or column == "__infos__":
                    continue

                if len(table[column]["values"]) <= i:
                    continue

                if conf.binaryFields and column in conf.binaryFields:
                    continue

                value = table[column]["values"][i]

                if column in binary_fields and re.search(HASH_BINARY_COLUMNS_REGEX, column) is not None:
                    previous = value
                    value = encodeHex(getBytes(value), binary=False)
                    replacements[value] = previous

                if hashRecognition(value):
                    found = True

                    if col_user and i < len(table[col_user]["values"]):
                        if table[col_user]["values"][i] not in attack_dict:
                            attack_dict[table[col_user]["values"][i]] = []

                        attack_dict[table[col_user]["values"][i]].append(value)
                    else:
                        attack_dict["%s%d" % (DUMMY_USER_PREFIX, i)] = [value]

                    col_passwords.add(column)

        if attack_dict:
            infoMsg = "recognized possible password hashes in column%s " % ("s" if len(col_passwords) > 1 else "")
            infoMsg += "'%s'" % ", ".join(col for col in col_passwords)
            logger.info(infoMsg)

            storeHashesToFile(attack_dict)

            message = "do you want to crack them via a dictionary-based attack? %s" % ("[y/N/q]" if conf.multipleTargets else "[Y/n/q]")
            choice = readInput(message, default='N' if conf.multipleTargets else 'Y').upper()

            if choice == 'N':
                return
            elif choice == 'Q':
                raise SqlmapUserQuitException

            results = dictionaryAttack(attack_dict)
            lut = dict()

            for (_, hash_, password) in results:
                if hash_:
                    key = hash_ if hash_ not in replacements else replacements[hash_]
                    lut[key.lower()] = password
                    lut["0x%s" % key.lower()] = password

            debugMsg = "post-processing table dump"
            logger.debug(debugMsg)

            for i in xrange(count):
                for column in columns:
                    if not (column == col_user or column == '__infos__' or len(table[column]['values']) <= i):
                        value = table[column]['values'][i]

                        if value and value.lower() in lut:
                            table[column]['values'][i] = "%s (%s)" % (getUnicode(table[column]['values'][i]), getUnicode(lut[value.lower()] or HASH_EMPTY_PASSWORD_MARKER))
                            table[column]['length'] = max(table[column]['length'], len(table[column]['values'][i]))
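
The lookup table built from the cracking results is keyed twice per hash: once by the lowercased hash itself and once with a '0x' prefix, so values dumped from binary columns in hex notation still match. A tiny illustration of the final annotation step (hash and password are made-up sample values):

lut = {}
hash_ = "5F4DCC3B5AA765D61D8327DEB882CF99"  # MD5 of "password"
lut[hash_.lower()] = "password"
lut["0x%s" % hash_.lower()] = "password"

value = "0x5f4dcc3b5aa765d61d8327deb882cf99"  # as dumped from a binary column
if value and value.lower() in lut:
    value = "%s (%s)" % (value, lut[value.lower()])

print(value)  # 0x5f4dcc3b5aa765d61d8327deb882cf99 (password)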