Example #1
    async def run(self):
        async with aiohttp.ClientSession() as session:

            flag = await self.check_engine_available(session, self.engine)
            if not flag:
                logger.error(
                    "{engine_name} is not available, skipping!".format(
                        engine_name=self.engine_name))
                return
            logger.debug("{engine_name} is available, starting!".format(
                engine_name=self.engine_name))

            data = {'inputurl': self.target}
            content = await self.get(session,
                                     self.base_url,
                                     method="POST",
                                     data=data,
                                     headers=self.headers,
                                     timeout=self.timeout,
                                     proxy=self.proxy)

            ret = self.check_response_errors(content)
            if not ret[0]:
                self.deal_with_errors(ret[1])

            self.extract(content)
            logger.sysinfo("{engine} Found {num} sites".format(
                engine=self.engine_name, num=len(self.results['subdomain'])))
            logger.debug(self.engine_name + " " +
                         str(len(self.results['subdomain'])))
Example #2
def _run(domains_dic, vul_scan_flag):
    now_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    filename = 'srcscan_subdomain_check_' + time.strftime(
        "%Y%m%d_%H%M%S", time.localtime()) + '.xlsx'
    path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "data")
    if not os.path.exists(path):
        os.makedirs(path)
    for key in domains_dic.keys():
        domains = list(set(domains_dic[key]))
        if len(domains) > 0:
            logger.sysinfo(
                "Scanning %d domains at %s." %
                (len(domains),
                 time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            for domain in domains:
                ret = set()
                ret = subdomain_scan(domain, ret, now_time)
                title_scan(domain, ret, now_time)
                if vul_scan_flag:
                    vul_scan(domain, now_time)

            logger.sysinfo(
                "Fineshed scan %d domains at %s." %
                (len(domains),
                 time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))

            save(domains, path, filename, key)

        else:
            logger.error("Loading %d domains." % (len(domains)))
    send_smtp(path, filename)
Example #3
def title_scan(domain, ret, now_time):
    ret = list(ret)
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()
    logger.sysinfo('Checking %d subdomains of %s.' % (len(ret), domain))
    loop = asyncio.get_event_loop()
    thread_num = int(conf['config']['basic']['thread_num'])
    # Cap the number of worker tasks at the number of subdomains (at least one task)
    thread_num = thread_num if len(ret) > thread_num else max(len(ret), 1)
    tasks = []
    for i in range(0, thread_num):
        tasks.append(
            asyncio.ensure_future(
                get_title([ret[x]
                           for x in range(0 + i, len(ret), thread_num)])))
    loop.run_until_complete(asyncio.wait(tasks))
    for task in tasks:
        for subdomain, url, title, status, content_length in task.result():
            database.update_subdomain_status(subdomain, url, title, status,
                                             content_length, now_time)
    database.disconnect()
    logger.sysinfo("Checked subdomains' status of %s." % domain)
Example #4
    async def run(self):
        async with aiohttp.ClientSession() as session:

            flag = await self.check_engine_available(session, self.engine)
            if not flag:
                logger.error(
                    "{engine_name} is not available, skipping!".format(
                        engine_name=self.engine_name))
                return
            logger.debug("{engine_name} is available, starting!".format(
                engine_name=self.engine_name))

            data = {'inputurl': self.target}
            async with session.post(self.base_url, proxy=self.proxy,
                                    data=data) as res:
                if res is not None:
                    try:
                        content = await res.text()
                    except Exception:
                        content = ""

                    ret = self.check_response_errors(content)
                    if not ret[0]:
                        self.deal_with_errors(ret[1])

                    self.extract(content)

            logger.sysinfo("{engine} Found {num} sites".format(
                engine=self.engine_name, num=len(self.results['subdomain'])))
            logger.debug(self.engine_name + " " +
                         str(len(self.results['subdomain'])))
Example #5
def send_smtp(path, filename):
    try:
        mail_host = conf['config']['smtp']['mail_host'].strip()
        mail_port = int(conf['config']['smtp']['mail_port'])
        mail_user = conf['config']['smtp']['mail_user']
        mail_pass = conf['config']['smtp']['mail_pass']
        timeout = int(conf['config']['basic']['timeout'])
        sender = conf['config']['smtp']['sender']
        receivers = conf['config']['smtp']['receivers'].split(',')
    except Exception:
        logger.error(
            "Load config error: smtp, please check the config in srcscan.conf."
        )
        return

    content = '''
    Hello,

        srcscan subdomain check results [%s], please see the attachment.

                                                                                -- by srcscan
    ''' % (filename)
    message = MIMEMultipart()
    message['From'] = "srcscan<%s>" % sender
    message['To'] = ','.join(receivers)
    message['Subject'] = Header(filename, 'utf-8')
    message.attach(MIMEText(content, 'plain', 'utf-8'))

    with open(os.path.join(path, filename), 'rb') as f:
        att = MIMEText(f.read(), 'base64', 'utf-8')
        att["Content-Type"] = 'application/octet-stream'
        att.add_header("Content-Disposition",
                       "attachment",
                       filename=("utf-8", "", filename))
        message.attach(att)

    n = 3
    while n > 0:
        try:
            socket.setdefaulttimeout(timeout)
            smtpObj = smtplib.SMTP_SSL(host=mail_host)
            smtpObj.connect(mail_host, mail_port)
            smtpObj.login(mail_user, mail_pass)
            smtpObj.sendmail(sender, receivers, message.as_string())
            logger.sysinfo("SMTP send success.")
            break
        except smtplib.SMTPException as e:
            logger.error("Error for SMTP: %s" % (str(e)))
        except socket.timeout as e:
            logger.error("Timeout for SMTP.")
        except Exception as e:
            print(str(e))
            logger.error(
                "Error for SMTP, please check SMTP' config in srcscan.conf.")
        time.sleep(10)
        n -= 1
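
For reference, a sketch of the configuration shape that send_smtp() expects: the section and key names come from the lookups above, while every value below is a placeholder rather than srcscan's actual defaults.

# Hypothetical parsed form of srcscan.conf, as read by the code above.
conf = {
    'config': {
        'basic': {'timeout': '10', 'thread_num': '10'},
        'smtp': {
            'mail_host': 'smtp.example.com',
            'mail_port': '465',
            'mail_user': 'user@example.com',
            'mail_pass': 'password',
            'sender': 'user@example.com',
            'receivers': 'a@example.com,b@example.com',
        },
    }
}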
Example #6
def read_domain_file(domain_file, domains_dic):
    domains_dic[os.path.basename(domain_file)] = []
    logger.sysinfo("Loading and checking domains of file %s." % domain_file)
    with open(domain_file, 'r') as f:
        for d in f.readlines():
            domain = check_domain(d)
            if not domain:
                # Report malformed lines, skip blank ones silently, and never append a falsy value
                if d.strip() != '':
                    logger.error("Error domain: %s" % d)
                continue
            domains_dic[os.path.basename(domain_file)].append(domain)
    return domains_dic
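
check_domain() is project-internal and not shown on this page; purely as an illustration, a validator with the behavior the callers rely on (return a cleaned domain string, or a falsy value) might look like the sketch below. The function name and regex are assumptions, not srcscan's code.

import re

def check_domain_sketch(line):
    # Hypothetical stand-in for check_domain(): normalize the input and
    # accept only dotted hostname labels, otherwise return None.
    candidate = line.strip().lower()
    pattern = r'^(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.)+[a-z]{2,}$'
    return candidate if re.match(pattern, candidate) else None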
Example #7
def update_program():
    git_repository = "https://github.com/orleven/srcscan.git"
    success = False
    path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    if not os.path.exists(os.path.join(path, ".git")):
        msg = "Have not a git repository. Please checkout the 'srcscan' repository "
        msg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/orleven/srcscan.git srcscan')"
        logger.error(msg)
    else:
        msg = "Updating srcscan to the latest version from the gitHub repository."
        logger.sysinfo(msg)

        msg = "The srcscan will try to update itself using 'git' command."
        logger.sysinfo(msg)

        logger.sysinfo("Update in progress.")

    stdout = stderr = ""
    try:
        process = subprocess.Popen(
            "git checkout . && git pull %s HEAD" % git_repository,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=path.encode(locale.getpreferredencoding())
        )  # Reference: http://blog.stastnarodina.com/honza-en/spot/python-unicodeencodeerror/
        poll_process(process, True)
        stdout, stderr = process.communicate()
        # communicate() returns bytes; decode so the error checks below work on str
        stdout = stdout.decode(errors="replace") if stdout else ""
        stderr = stderr.decode(errors="replace") if stderr else ""
        success = not process.returncode
    except (IOError, OSError) as ex:
        success = False
        logger.error(type(ex).__name__)

    if success:
        logger.success("The latest revision '%s'" % (get_revision_number()))
    else:
        if "Not a git repository" in stderr:
            msg = "Not a valid git repository. Please checkout the 'orleven/srcscan' repository "
            msg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/orleven/srcscan.git srcscan')"
            logger.error(msg)
        else:
            logger.error("Update could not be completed ('%s')" %
                         re.sub(r"\W+", " ", stderr).strip())

    if not success:
        if sys.platform == 'win32':
            msg = "for Windows platform it's recommended "
            msg += "to use a GitHub for Windows client for updating "
            msg += "purposes (http://windows.github.com/) or just "
            msg += "download the latest snapshot from "
            msg += "https://github.com/orleven/srcscan"
        else:
            msg = "For Linux platform it's required "
            msg += "to install a standard 'git' package (e.g.: 'sudo apt-get install git')"

        logger.sysinfo(msg)
Example #8
File: common.py Project: orleven/srcscan
def tocsv(datalines, path, file, key='Mysheet'):
    filename = os.path.join(path, file)
    logger.info('Exporting to %s...' % (filename))
    if os.path.isfile(filename):
        book = load_workbook(filename=filename)
    else:
        book = Workbook()
        book.remove(book.active)
    if key not in book.sheetnames:
        ws = book.create_sheet(key)
    else:
        ws = book[key]
    i = 1
    titleList = []
    for line in datalines:
        i = i + 1
        for col in line:
            if col not in titleList:
                titleList.append(col)
                ws.cell(row=1, column=len(titleList)).value = col
            cell = ws.cell(row=i, column=titleList.index(col) + 1)
            value = line[col]
            try:
                if value is None or value == '':
                    cell.value = ""
                elif isinstance(value, (int, str)):
                    cell.value = value
                elif isinstance(value, bytes):
                    cell.value = value.decode('utf-8')
                elif isinstance(value, (list, dict)):
                    cell.value = str(value)
                else:
                    cell.value = "Unsupported value type."
            except Exception:
                cell.value = "Some error."
    book.save(filename)
    logger.sysinfo('Exported to %s successfully!' % (filename))
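
For orientation, here is a minimal, self-contained sketch of the same export idea using openpyxl directly; the sample rows, sheet title, and output filename are illustrative and not part of srcscan.

from openpyxl import Workbook

rows = [{'subdomain': 'a.example.com', 'status': 200},
        {'subdomain': 'b.example.com', 'status': 404}]

book = Workbook()
ws = book.active
ws.title = 'Mysheet'
headers = []
for r, row in enumerate(rows, start=2):   # row 1 holds the headers
    for col_name, value in row.items():
        if col_name not in headers:
            headers.append(col_name)
            ws.cell(row=1, column=len(headers)).value = col_name
        ws.cell(row=r, column=headers.index(col_name) + 1).value = value
book.save('example.xlsx')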
Example #9
def run(target, vul_scan):
    domains_dic = {}

    if os.path.isdir(target):
        domain_file_list = glob.glob(
            os.path.join(
                os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                target, '*.*'))
        for domain_file in domain_file_list:
            domains_dic = read_domain_file(domain_file, domains_dic)

    elif os.path.isfile(target):
        domains_dic = read_domain_file(target, domains_dic)

    elif check_domain(target):
        logger.sysinfo("Loading and checking domain %s." % target)
        domains_dic[target] = [target]

    else:
        sys.exit(logger.error("Error domain: %s" % target))
    _run(domains_dic, vul_scan)
Example #10
File: core.py Project: j14ncn/submon
def run(target):
    domains_dic = {}
    if os.path.isdir(target):
        domain_file_list = glob.glob(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), target, '*.*'))
        for domain_file in domain_file_list:
            domains_dic[os.path.basename(domain_file)] = []
            logger.sysinfo("Loading and checking domains of file %s." % domain_file)
            with open(domain_file, 'r') as f:
                for domain in f.readlines():
                    domain = check_domain(domain)
                    if not domain:
                        logger.error("Error domain: %s" % domain)
                        continue
                    domains_dic[os.path.basename(domain_file)].append(domain)
    elif os.path.isfile(target):
        domains_dic[os.path.basename(target)] = []
        logger.sysinfo("Loading and checking domains of file %s." % target)
        with open(target, 'r') as f:
            for domain in f.readlines():
                domain = check_domain(domain)
                if not domain:
                    logger.error("Error domain: %s" % domain)
                    continue
                domains_dic[os.path.basename(target)].append(domain)
    elif check_domain(target):
        logger.sysinfo("Loading and checking domain %s." % target)
        domains_dic[target] = [target]
    else:
        sys.exit(logger.error("Error domain: %s" % target))
    _run(domains_dic)
Example #11
File: submon.py Project: thatqier/submon
def handle(parser):
    args = parser.parse_args()
    banner()
    check_update(args)
    config_parser()
    domains_dic = {}
    # asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

    if args.debug:
        debug = True
        logger.set_level(CUSTOM_LOGGING.DEBUG)
    nomal = args.nomal
    if args.help:
        parser.print_help()
    elif args.domain:
        domain = check_domain(args.domain)
        if not domain:
            sys.exit(logger.error("Error domain: %s" % domain))
        logger.sysinfo("Loading and checking domain %s." % args.domain)
        domains_dic[domain] = [domain]
        run(domains_dic, nomal)
    elif args.domain_file:
        if os.path.isdir(args.domain_file):
            domain_file_list = glob.glob(os.path.join(os.path.dirname(os.path.abspath(__file__)), "domain",'*.*'))
            for domain_file in domain_file_list:
                domains_dic[os.path.basename(domain_file)] = []
                logger.sysinfo("Loading and checking domains of file %s." % args.domain_file)
                with open(domain_file, 'r') as f:
                    for domain in f.readlines():
                        domain = check_domain(domain)
                        if not domain:
                            logger.error("Error domain: %s" % domain)
                            continue
                        domains_dic[os.path.basename(domain_file)].append(domain)
            run(domains_dic, nomal)
        elif os.path.isfile(args.domain_file):
            domains_dic[os.path.basename(args.domain_file)] = []
            logger.sysinfo("Loading and checking domains of file %s." % args.domain_file)
            with open(args.domain_file, 'r') as f:
                for domain in f.readlines():
                    domain = check_domain(domain)
                    if not domain:
                        logger.error("Error domain: %s" % domain)
                        continue
                    domains_dic[os.path.basename(args.domain_file)].append(domain)
            run(domains_dic, nomal)
        else:
            logger.sysinfo("Error for domain file, please check the file %s." % args.domain_file)
    else:
        parser.print_help()
Example #12
def subdomain_scan(domain, ret, now_time):
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()
    logger.sysinfo("Scanning domain %s." % domain)
    _engines = [_(domain) for _ in engines.values()]
    loop = asyncio.get_event_loop()
    if debug:
        loop.set_debug(True)
    for task in [asyncio.ensure_future(_engine.run()) for _engine in _engines]:
        loop.run_until_complete(task)
    # loop.close()

    for _engine in _engines:
        logger.sysinfo("{engine} Found {num} sites".format(
            engine=_engine.engine_name, num=len(_engine.results['subdomain'])))
        ret.update(_engine.results['subdomain'])
    logger.sysinfo("Found %d subdomains of %s." % (len(ret), domain))
    for subdomain in ret:
        database.insert_subdomain(subdomain, None, None, 0, 0, now_time,
                                  domain)
    database.disconnect()
    return ret
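
Note that the loop in subdomain_scan() drives each engine coroutine to completion one at a time with run_until_complete. A self-contained sketch of running such engine coroutines concurrently with asyncio.gather is shown below; FakeEngine and the engine names are illustrative stand-ins, not srcscan's classes.

import asyncio

class FakeEngine:
    # Illustrative stand-in for a srcscan search-engine class.
    def __init__(self, name):
        self.engine_name = name
        self.results = {'subdomain': set()}

    async def run(self):
        await asyncio.sleep(0)                  # pretend to query the engine
        self.results['subdomain'].add('www.example.com')

fake_engines = [FakeEngine('engine_a'), FakeEngine('engine_b')]
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.gather(*[e.run() for e in fake_engines]))
print({e.engine_name: e.results['subdomain'] for e in fake_engines})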
Example #13
def vul_scan(domain, now_time):
    datas = []
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()
    logger.sysinfo("Scaning vul for: %s " % (domain))
    for _row in database.select_mondomain(domain):
        data = {
            "subdomain": _row[0],
            "url": _row[1],
            "title": _row[2],
            "status": _row[3],
            "len": _row[4],
            "update_time": _row[5],
            "domain": _row[6]
        }
        datas.append(data)

    for data in datas:
        if data['status'] != 0:
            logger.sysinfo("Scaning vul for %s." % (data['url']))
            crawlergo_scan(data['url'], data['domain'], now_time, database)

    logger.sysinfo("Scaned vul for: %s " % (domain))
    database.disconnect()
Example #14
def crawlergo_scan(url, domain, now_time, database):
    cmd = [
        conf['config']['crawlergo']['crawlergo_path'], "-c",
        conf['config']['crawlergo']['chrome_path'], "-t", "20", "-f", "smart",
        "--fuzz-path", "--output-mode", "json", url
    ]
    rsp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, error = rsp.communicate()
    try:
        result = json.loads(output.decode().split("--[Mission Complete]--")[1])
        _req_list = result["req_list"]
        sub_domain_list = result["sub_domain_list"]
    except Exception:
        # crawlergo produced no parseable JSON output, nothing to scan
        return
    req_list = []
    # str.strip() removes characters rather than a prefix, so drop the scheme explicitly
    bare_url = url.replace('https://', '').replace('http://', '').rstrip('/')
    for req in _req_list:
        if bare_url in req['url']:
            req_list.append(req)
        else:
            logger.sysinfo("Skip %s url by %s." % (req['url'], url))

    logger.sysinfo("Found %d url by %s." % (len(req_list), url))
    logger.sysinfo("Found %d subdomains by %s." % (len(sub_domain_list), url))
    for subdomain in sub_domain_list:
        database.insert_subdomain(subdomain, None, None, 0, 0, now_time,
                                  domain)

    # logger.sysinfo('Checking %d subdomains by %s.' % (len(sub_domain_list), url))
    # for subdomain, url, title, status, content_length in curl.run():
    #     database.update_subdomain_status(subdomain, url, title, status, content_length, now_time)
    # logger.sysinfo("Checked subdomains' status by %s." % url)

    loop = asyncio.get_event_loop()
    thread_num = int(conf['config']['basic']['thread_num'])
    # Cap the number of worker tasks at the number of requests (at least one task)
    thread_num = thread_num if len(req_list) > thread_num else max(len(req_list), 1)
    tasks = []
    for i in range(0, thread_num):
        tasks.append(
            asyncio.ensure_future(
                go_request([
                    req_list[x]
                    for x in range(0 + i, len(req_list), thread_num)
                ], url)))
    loop.run_until_complete(asyncio.wait(tasks))
Example #15
def poll_process(process, suppress_errors=False):
    """
    Polls the process until it exits and (unless suppress_errors) logs how it finished
    """

    while True:
        time.sleep(1)

        returncode = process.poll()

        if returncode is not None:
            if not suppress_errors:
                if returncode == 0:
                    logger.sysinfo(" done\n")
                elif returncode < 0:
                    logger.sysinfo(" process terminated by signal %d\n" % returncode)
                elif returncode > 0:
                    logger.sysinfo(" quit unexpectedly with return code %d\n" % returncode)

            break
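
A minimal, self-contained usage sketch of the same polling idea follows; it uses plain print() instead of the project's logger, and the child command is arbitrary.

import subprocess
import sys
import time

proc = subprocess.Popen([sys.executable, "-c", "print('work done')"],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
while proc.poll() is None:      # None means the child is still running
    time.sleep(1)
print("exit code:", proc.returncode)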
Example #16
File: core.py Project: j14ncn/submon
def _run(domains_dic):
    database = Database(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'submon.db'))
    database.connect()
    database.init()
    now_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    filename = 'SubMon_subdomain_check_' + time.strftime("%Y%m%d_%H%M%S", time.localtime()) + '.xlsx'
    path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "data")
    if not os.path.exists(path):
        os.makedirs(path)
    for key in domains_dic.keys():
        domains = list(set(domains_dic[key]))
        if len(domains) > 0:
            logger.sysinfo("Scanning %d domains at %s." % (len(domains), time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            for domain in domains:
                logger.sysinfo("Scanning domain %s." % domain)
                _engines = [_(domain) for _ in engines.values()]
                loop = asyncio.get_event_loop()
                if debug:
                    loop.set_debug(True)
                for task in [asyncio.ensure_future(_engine.run()) for _engine in _engines ]:
                    loop.run_until_complete(task)
                # loop.close()
                ret = set()
                for _engine in _engines:
                    logger.sysinfo("{engine} Found {num} sites".format(engine=_engine.engine_name,
                                                                       num=len(_engine.results['subdomain'])))
                    ret.update(_engine.results['subdomain'])


                logger.sysinfo("Found %d subdomains of %s." % (len(ret),domain))
                for subdomain in ret:
                    database.insert_subdomain(subdomain,None,None,0,0,now_time,domain)

                logger.sysinfo('Checking %d subdomains of %s.' % (len(ret),domain))
                curl = Curl()
                curl.load_targets(ret)
                for subdomain,url,title,status,content_length in curl.run():
                    database.update_subdomain_status(subdomain,url,title,status,content_length,now_time)
                logger.sysinfo("Checked subdomains' status of %s." % domain)

            datas = []
            for domain in domains:
                for _row in database.select_mondomain(domain):
                    data = {
                        "subdomain": _row[0],
                        "url": _row[1],
                        "title": _row[2],
                        "status": _row[3],
                        "len": _row[4],
                        "update_time" : _row[5],
                        "domain": _row[6]
                    }
                    datas.append(data)
            tocsv(datas, path, filename, key)
            logger.sysinfo("Finished scanning %d domains at %s." % (len(domains), time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
        else:
            logger.error("Loading %d domains." % (len(domains)))
    send_smtp(path, filename)
    database.disconnect()
    print()
    print()