Example No. 1
def webScan():
    # Full "www" scan: run every web plugin against each target, then
    # crawl single targets to discover more pages.
    startTime = time.clock()
    e = Exploit_run(urlconfig.threadNum)  # worker pool sized from config

    for url in urlconfig.url:
        logger.info('ScanStart Target:%s' % url)
        e.setCurrentUrl(url)
        e.load_modules("www", url)  # load the "www" plugin group
        e.run()
        if not urlconfig.mutiurl:
            # Single-target mode: also spider the site for further URLs.
            e.init_spider()
            s = crawler.SpiderMain(url)
            s.craw()
        time.sleep(0.1)

    endTime = time.clock()
    urlconfig.runningTime = endTime - startTime
    e.report()
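Note that time.clock() dates this code to Python 2: it was deprecated in Python 3.3 and removed in 3.8. A minimal sketch of the same elapsed-time bookkeeping on Python 3 would use time.perf_counter(); timed_scan and its arguments below are illustrative stand-ins, not w9scan API.

import time

def timed_scan(targets, scan_one):
    # 'targets' is any iterable of URLs; 'scan_one' is any callable that
    # scans a single URL (both hypothetical stand-ins).
    start = time.perf_counter()  # monotonic, high-resolution timer
    for url in targets:
        scan_one(url)
    return time.perf_counter() - start

# Usage: elapsed = timed_scan(['http://example.com'], lambda u: None)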
Example No. 2
def pluginScan():
    # Plugin-only scan: skip port scanning and service detection, then run
    # the configured plugin group against every target.
    if not urlconfig.usePlugin:
        return False
    urlconfig.scanport = False
    urlconfig.find_service = False
    urlconfig.diyPlugin = LIST_PLUGINS
    startTime = time.clock()
    e = Exploit_run(urlconfig.threadNum)
    for u in urlconfig.url:
        logger.info('ScanStart Target:%s' % u)
        e.setCurrentUrl(u)
        e.load_modules(urlconfig.plugin, u)
        e.run()
        time.sleep(0.01)
    endTime = time.clock()
    urlconfig.runningTime = endTime - startTime
    e.report()
    sys.exit()
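Here e.load_modules(urlconfig.plugin, u) resolves plugin names to code at run time. A minimal sketch of that kind of dynamic loading, assuming a hypothetical plugins package whose modules each expose a run(url) entry point (neither assumption reflects w9scan's actual layout):

import importlib

def load_and_run(plugin_names, url):
    # Hypothetical layout: plugins/<name>.py, each exposing run(url).
    for name in plugin_names:
        module = importlib.import_module('plugins.%s' % name)
        module.run(url)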
Example No. 3
def pluginScan():
    # Multi-URL variant of the plugin-only scan: pins the thread count and
    # crawl depth before running the configured plugin group.
    if not urlconfig.mutiurl:
        return False
    urlconfig.scanport = False
    urlconfig.find_service = False
    urlconfig.threadNum = 5
    urlconfig.deepMax = 100
    urlconfig.diyPlugin = LIST_PLUGINS
    startTime = time.clock()
    e = Exploit_run(urlconfig.threadNum)
    for u in urlconfig.url:
        printMessage('[***] ScanStart Target:%s' % u)
        e.setCurrentUrl(u)
        e.load_modules(urlconfig.plugin, u)
        e.run()
        time.sleep(0.01)
    endTime = time.clock()
    urlconfig.runningTime = endTime - startTime
    e.report()
    sys.exit()
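Exploit_run(urlconfig.threadNum) implies a fixed-size worker pool draining a queue of scan tasks. A minimal 2/3-compatible sketch of that pattern follows; run_pool and its arguments are illustrative, not w9scan internals:

import threading
try:
    import queue             # Python 3
except ImportError:
    import Queue as queue    # Python 2

def run_pool(tasks, thread_num=5):
    # Drain 'tasks' (zero-argument callables) with thread_num workers.
    q = queue.Queue()
    for task in tasks:
        q.put(task)

    def worker():
        while True:
            try:
                job = q.get_nowait()
            except queue.Empty:
                return
            try:
                job()
            finally:
                q.task_done()

    workers = [threading.Thread(target=worker) for _ in range(thread_num)]
    for w in workers:
        w.start()
    q.join()  # block until every queued task reports task_done()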
Example No. 4
def main():
    """
    Main function of w9scan when running from command line.
    """
    checkEnvironment()  # check the runtime environment
    setPaths(modulePath())  # set absolute paths for key directories and files
    banner()

    parser = argparse.ArgumentParser(description="w9scan scanner")
    parser.add_argument("--update", help="update w9scan", action="store_true")
    parser.add_argument("--guide", help="w9scan to guide", action="store_true")
    parser.add_argument("-u", help="url")
    parser.add_argument("-p", "--plugin", help="plugins")
    parser.add_argument("-s", "--search", help="find infomation of plugin")

    args = parser.parse_args()
    urlconfig.mutiurl = False
    urlconfig.url = []
    if args.update:
        updateProgram()
        return 0
    if args.search:
        print(getPluginNum(args.search))
        return 0
    if args.u and args.plugin:
        url = args.u
        if url.startswith("@"):
            urlconfig.mutiurl = True
            fileName = url[1:]
            try:
                with open(fileName, "r") as f:
                    o = f.readlines()
                for u in o:
                    urlconfig.url.append(makeurl(u.strip()))
            except IOError:
                logger.critical("Filename:'%s' open failed" % fileName)
                exit()
            if len(o) == 0:
                logger.critical("[xxx] The target address is empty")
                exit()
            print urlconfig.url
        else:
            urlconfig.url.append(makeurl(url))

        # Same plugin-only setup as pluginScan(): no port/service scan,
        # fixed thread count and crawl depth.
        urlconfig.scanport = False
        urlconfig.find_service = False
        urlconfig.threadNum = 5
        urlconfig.deepMax = 100
        urlconfig.diyPlugin = LIST_PLUGINS
        startTime = time.clock()
        e = Exploit_run(urlconfig.threadNum)
        for u in urlconfig.url:
            print('[***] ScanStart Target:%s' % u)
            e.setCurrentUrl(u)
            e.load_modules(args.plugin, u)
            e.run()
            time.sleep(0.01)
        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()
        return 0
    try:
        inputUrl = raw_input('[1] Input url > ')

        if inputUrl == '':
            logger.critical("[xxx] You have to enter a URL")
            exit()
        if inputUrl.startswith("@"):
            urlconfig.mutiurl = True
            fileName = inputUrl[1:]
            try:
                with open(fileName, "r") as f:
                    o = f.readlines()
                for url in o:
                    urlconfig.url.append(makeurl(url.strip()))
            except IOError:
                logger.critical("Filename:'%s' open failed" % fileName)
                exit()
            if len(o) == 0:
                logger.critical("[xxx] The target address is empty")
                exit()
            print urlconfig.url
        else:
            urlconfig.url.append(makeurl(inputUrl))
        print '[***] URLs loaded: %d' % len(urlconfig.url)
        print("[Tips] You can select these plugins (%s) or select all" %
              (' '.join(LIST_PLUGINS)))
        diyPlugin = raw_input("[2] Please select the required plugins > ")

        if diyPlugin.lower() == 'all':
            urlconfig.diyPlugin = LIST_PLUGINS
        else:
            urlconfig.diyPlugin = diyPlugin.strip().split(' ')
        print "[***] You select the plugins:%s" % (' '.join(
            urlconfig.diyPlugin))
        urlconfig.scanport = False
        urlconfig.find_service = False
        if 'find_service' in urlconfig.diyPlugin:
            urlconfig.find_service = True
            input_scanport = raw_input(
                '[2.1] Do you need to scan all ports? (Y/N) (default N) > ')
            if input_scanport.lower() in ("y", "yes"):
                urlconfig.scanport = True

        urlconfig.threadNum = raw_input(
            '[3] Number of threads to start (default 5) > ')
        if urlconfig.threadNum == '':
            urlconfig.threadNum = 5

        urlconfig.threadNum = int(urlconfig.threadNum)
        urlconfig.deepMax = raw_input(
            '[4] Set the depth of the crawler (default 100 | 0 disables the crawler) > '
        )
        if urlconfig.deepMax == '':
            urlconfig.deepMax = 100
        urlconfig.deepMax = int(urlconfig.deepMax)

        startTime = time.clock()
        e = Exploit_run(urlconfig.threadNum)

        for url in urlconfig.url:
            print('[***] ScanStart Target:%s' % url)
            e.setCurrentUrl(url)
            e.load_modules("www", url)
            e.run()
            if not urlconfig.mutiurl:
                e.init_spider()
                s = crawler.SpiderMain(url)
                s.craw()
            time.sleep(0.01)

        endTime = time.clock()
        urlconfig.runningTime = endTime - startTime
        e.report()

    except KeyboardInterrupt:
        print("[***] User Interrupt")
        exit()
    except Exception as info:
        logger.critical("[xxx] MainError: %s:%s" % (str(Exception), info))
        # 'e' only exists if the scan loop was reached before the failure.
        data = e.buildHtml.getData() if 'e' in locals() else ''
        aax = "error:%s urlconfig:%s data:%s" % (
            str(Exception) + " " + str(info), str(urlconfig), data)
        createIssueForBlog(aax)
        exit()
        exit()
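Example No. 4 funnels every raw target through makeurl() and expands '@file' arguments into one target per line. A minimal sketch of both steps follows; normalize_url and load_targets are hypothetical stand-ins, and w9scan's makeurl may behave differently:

def normalize_url(raw):
    # Hypothetical stand-in for makeurl(): make sure a scheme is present.
    url = raw.strip()
    if not url.startswith(('http://', 'https://')):
        url = 'http://' + url
    return url

def load_targets(arg):
    # '-u @targets.txt' style: read one target per non-empty line.
    if arg.startswith('@'):
        with open(arg[1:]) as f:
            return [normalize_url(line) for line in f if line.strip()]
    return [normalize_url(arg)]

# Usage: load_targets('@targets.txt') or load_targets('example.com')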