Code example #1
0
File: serverinfo.py  Project: Hadesy2k/sqlivulscan
def __getserverinfo(url):
    """Get server name and version of the given domain.

    Scrapes https://aruljohn.com/webserver/<domain> and collects the
    values from the result table.

    Returns a list of server-info strings, or ['', ''] on any failure
    (parse error or an error message on the page).
    """

    # Reduce a full URL to its host: use netloc when present, otherwise
    # the first path segment (handles inputs like "example.com/page").
    # Parse once instead of calling urlparse() twice as before.
    parsed = urlparse(url)
    host = parsed.netloc if parsed.netloc != '' else parsed.path.split("/")[0]

    info = []  # collected server-info values
    url = "https://aruljohn.com/webserver/" + host

    try:
        result = web.gethtml(url)
    except KeyboardInterrupt:
        raise  # bare re-raise preserves the original traceback

    try:
        soup = bs4.BeautifulSoup(result, "lxml")
    except Exception:  # narrowed from bare except: keep SystemExit/KeyboardInterrupt alive
        return ['', '']

    # the lookup page reports failures in a <p class="err"> element
    if soup.findAll('p', {"class": "err"}):
        return ['', '']

    # rows whose first cell carries class "title" hold name/value pairs;
    # the second <td> is the value we want (strip trailing CR)
    for row in soup.findAll('tr'):
        if row.findAll('td', {"class": "title"}):
            info.append(row.findAll('td')[1].text.rstrip('\r'))

    return info
Code example #2
0
File: scanner.py  Project: zyzgc1997/sqliv-M
def __sqli(url):
    """check SQL injection vulnerability"""

    std.stdout("scanning {}".format(url), end="")

    domain = url.split("?")[0]  # domain with path without queries
    queries = urlparse(url).query.split("&")
    # no queries in url
    if not any(queries):
        print ""  # move cursor to new line
        return False, None
    payloads = ("'", "')", "';", '"', '")', '";', '`', '`)', '`;', '\\', "%27",
                "%%2727", "%25%27", "%60", "%5C")
    for payload in payloads:
        # website = domain + "?" + ("&".join([param + payload for param in queries]))
        for param_i in xrange(len(queries)):
            website = __single_arg(param_i, queries, payload, domain)
            source = web.gethtml(website)
            if source:
                vulnerable, db = sqlerrors.check(source)
                if vulnerable and db != None:
                    print ""  # move cursor to new line
                    std.showsign(website + " vulnerable")
                    return True, db

    print ""  # move cursor to new line
    return False, None
Code example #3
0
File: serverinfo.py  Project: icysun/sqliv
def __getserverinfo(url):
    """Get server name and version of the given domain.

    Scrapes https://aruljohn.com/webserver/<domain> and collects the
    values from the result table.

    Returns a list of server-info strings, or ['', ''] on any failure
    (parse error or an error message on the page).
    """

    # Parse once (the original called urlparse() twice); fall back to the
    # first path segment when there is no netloc, e.g. "example.com/page".
    parsed = urlparse(url)
    host = parsed.netloc if parsed.netloc != '' else parsed.path.split("/")[0]

    info = []  # collected server-info values
    url = "https://aruljohn.com/webserver/" + host

    try:
        result = web.gethtml(url)
    except KeyboardInterrupt:
        raise  # bare re-raise preserves the original traceback

    try:
        soup = bs4.BeautifulSoup(result, "lxml")
    except Exception:  # narrowed from bare except: keep SystemExit/KeyboardInterrupt alive
        return ['', '']

    # the lookup page reports failures in a <p class="err"> element
    if soup.findAll('p', {"class": "err"}):
        return ['', '']

    # rows whose first cell carries class "title" hold name/value pairs;
    # the second <td> is the value we want (strip trailing CR)
    for row in soup.findAll('tr'):
        if row.findAll('td', {"class": "title"}):
            info.append(row.findAll('td')[1].text.rstrip('\r'))

    return info
Code example #4
0
File: crawler.py  Project: icysun/sqliv
def crawl(url):
    """Crawl links on the page at `url` that belong to the same domain.

    Fetches the page, extracts every <a href="..."> target that looks
    like a dynamic page with query parameters, and collects absolute
    URLs into the module-level `links` list.

    Returns the list of links, or None when the page could not be
    fetched.
    """
    global links

    links = []

    try:
        result, URL = web.gethtml(url, lastURL=True)
    except Exception:  # narrowed from bare except: fetch failed -> no result
        return None

    if result:
        # Derive the "http://host/path/" base from the final URL; when
        # the URL has no path component, just ensure a trailing slash.
        parts = URL.split('/')
        if len(parts) >= 4:
            domain = 'http://' + '/'.join(parts[2:-1]) + '/'
        else:
            domain = URL.rstrip('/') + '/'

        for link in re.findall('<a href="(.*?)"', result):
            # Match dynamic pages with queries, e.g.
            # www.example.com/index.(php|asp|aspx|jsp)?query=1
            # Fixes vs. original: "apsx" typo corrected to "aspx", and
            # the dots escaped so "." no longer matches any character.
            if re.search(r'(.*?)(\.php\?|\.asp\?|\.aspx\?|\.jsp\?)(.*?)=(.*?)',
                         link):
                if parameterControl(link) == True:
                    if link.startswith(
                        ("http", "www")) or domain in urlparse(link).path:
                        links.append(link)
                    else:
                        # Relative link: prefix the domain. The original's
                        # ternary computed `domain + link` in BOTH branches;
                        # strip a leading "/" so domain's trailing slash
                        # doesn't produce "host//path".
                        links.append(domain + link.lstrip('/'))

    return links
Code example #5
0
def __sqli(url):
    """check SQL injection vulnerability"""

    io.stdout("scanning {}".format(url), end="")

    domain = url.split("?")[0]  # domain with path without queries
    queries = urlparse(url).query.split("&")

    # no queries in url
    if not any(queries):
        return False, None

    website = domain + "?" + ("&".join([param + "'" for param in queries]))
    source = web.gethtml(website)
    if source:
        vulnerable, db = sqlerrors.check(source)
        if vulnerable and db != None:
            io.showsign(" vulnerable")
            return True, db

    print ""  # move cursor to new line
    return False, None
Code example #6
0
File: scanner.py  Project: Hadesy2k/sqlivulscan
def __sqli(url):
    """check SQL injection vulnerability"""

    std.stdout("scanning {}".format(url), end="")

    domain = url.split("?")[0]  # domain with path without queries
    queries = urlparse(url).query.split("&")
    # no queries in url
    if not any(queries):
        print "" # move cursor to new line
        return False, None

    payloads = ("'", "')", "';", '"', '")', '";', '`', '`)', '`;', '\\', "%27", "%%2727", "%25%27", "%60", "%5C")
    for payload in payloads:
        website = domain + "?" + ("&".join([param + payload for param in queries]))
        source = web.gethtml(website)
        if source:
            vulnerable, db = sqlerrors.check(source)
            if vulnerable and db != None:
                std.showsign(" vulnerable")
                return True, db

    print ""  # move cursor to new line
    return False, None