Example #1
def domain_lookup():
    r = Resolver()
    r.timeout = r.lifetime = 8.0
    while True:
        try:
            host = q_hosts.get(timeout=0.1)
            print "[%s] host=> %s" % ("domain_lookup", host)
        except:
            break
        _schema, _host, _path = parse_url(host)
        try:
            m = re.search(r'\d+\.\d+\.\d+\.\d+', _host.split(':')[0])
            if m:
                q_targets.put({'file': '', 'url': host})
                ips_to_scan.append(m.group(0))
            else:
                answers = r.query(_host.split(':')[0])
                if answers:
                    q_targets.put({'file': '', 'url': host})
                    for _ in answers:
                        ips_to_scan.append(_.address)
        except Exception, e:
            lock.acquire()
            print '[%s][Warning] Invalid domain: %s' % (get_time(), host)
            lock.release()
Example #2
def domain_lookup(q_targets, q_hosts, lock, ips_to_scan):
    r = Resolver()
    r.timeout = r.lifetime = 8.0
    while True:
        try:
            host = q_hosts.get(timeout=0.1)
        except:
            break
        _schema, _host, _path = parse_url(host)
        #print "_schema:{0}\t_host:{1}\t_path:{2}".format(_schema, _host, _path)
        #print _host.split(":")[0]
        try:
            m = re.search(r'\d+\.\d+\.\d+\.\d+', _host.split(':')[0])
            if m:
                q_targets.put({'file': '', 'url': host})
                ips_to_scan.append(m.group(0))
                #print "in try->if"
            else:
                # Queue this URL as a scan target whether or not the lookup succeeds
                q_targets.put({'file': '', 'url': host})
                answers = r.query(_host.split(':')[0])
                if answers:
                    for _ in answers:
                        ips_to_scan.append(_.address)
        except Exception, e:
            lock.acquire()
            print '[%s][Warning] Invalid domain: [%s]' % (get_time(), host)
            print str(e)
            lock.release()
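Both versions above rely on objects the caller must create: a queue of hostnames, a queue of scan targets, a shared result list, and a print lock, plus module-level helpers such as parse_url and get_time and dnspython's Resolver. The sketch below is a minimal, illustrative wiring for the parameterized version (Example #2), assuming it sits in the same module; parse_url and get_time here are simplified stand-ins, not the originals from the project.

# Minimal wiring sketch (Python 2 style, matching the snippets above).
# Requires dnspython; parse_url/get_time below are simplified stand-ins.
import re
import time
import Queue
import threading
from dns.resolver import Resolver

def parse_url(url):
    # Stand-in: split "schema://host[:port]/path" into (schema, host, path).
    m = re.match(r'^(?:(\w+)://)?([^/]+)(/.*)?$', url)
    return m.group(1) or 'http', m.group(2), m.group(3) or '/'

def get_time():
    return time.strftime('%H:%M:%S')

q_hosts = Queue.Queue()
q_targets = Queue.Queue()
ips_to_scan = []
lock = threading.Lock()

for h in ['www.example.com', 'http://10.0.0.1:8080/']:
    q_hosts.put(h)

# Several workers drain q_hosts; each exits when the queue stays empty for 0.1s.
workers = [threading.Thread(target=domain_lookup,
                            args=(q_targets, q_hosts, lock, ips_to_scan))
           for _ in range(4)]
for w in workers:
    w.start()
for w in workers:
    w.join()
print 'Resolved IPs:', ips_to_scan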
Example #3
 def _parse_url_from_file(self):
     with open(self.file) as inFile:
         line = inFile.readline().strip()
         if line:
             url = line.split()[1]
         else:
             url = ''
         return parse_url(url)
Example #4
 def init_from_url(self, url):
     self.init_reset()
     if not url.find('://') > 0:
         self.url = 'http://' + url
     else:
         self.url = url
     self.schema, self.host, self.path = parse_url(url)
     self.init_final()
Example #5
 def _parse_url_from_file(self):
     url = ''
     with open(self.log_file) as infile:
         for line in infile:
             line = line.strip()
             if line and len(line.split()) >= 3:
                 url = line.split()[1]
                 break
     return parse_url(url)
Example #6
 def _parse_url_from_file(self):
     url = ''
     with open(self.log_file) as infile:
         for line in infile:
             line = line.strip()
             if line and len(line.split()) >= 2:
                 url = line.split()[1]
                 break
     return parse_url(url)
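Both variants above pull the URL out of a crawler log by taking the second whitespace-separated field of the first usable line (the first variant requires at least three fields, the second at least two). The exact log format is an assumption here, but a line of the form "<status> <url> <extra>" is what that slicing implies:

# Illustrative only: which field the parsers above extract.
sample_line = '200 http://www.example.com/admin/login.php (from crawler)'
fields = sample_line.strip().split()
if len(fields) >= 2:
    url = fields[1]
print url    # http://www.example.com/admin/login.php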
Example #7
def domain_lookup():
    while True:
        try:
            host = queue_hosts.get(timeout=0.1)
        except Queue.Empty as e:
            break
        _schema, _host, _path = parse_url(host)
        try:
            m = re.search(r'\d+\.\d+\.\d+\.\d+', _host.split(':')[0])
            if m:
                q_targets.put({'file': '', 'url': host})
                ips_to_scan.append(m.group(0))
            else:
                ip = socket.gethostbyname(_host.split(':')[0])
                q_targets.put({'file': '', 'url': host})
                ips_to_scan.append(ip)
        except Exception as e:
            print e
            print_msg('[Warning] Invalid domain <%s>' % _host.split(':')[0])
Example #8
def domain_lookup():
    r = Resolver()
    r.timeout = r.lifetime = 10.0
    # r.nameservers = ['182.254.116.116', '223.5.5.5'] + r.nameservers
    while True:
        try:
            host = queue_hosts.get(timeout=0.1)
        except:
            break
        _schema, _host, _path = parse_url(host)
        try:
            m = re.search(r'\d+\.\d+\.\d+\.\d+', _host.split(':')[0])
            if m:
                q_targets.put({'file': '', 'url': host})
                ips_to_scan.append(m.group(0))
            else:
                answers = r.query(_host.split(':')[0])
                if answers:
                    q_targets.put({'file': '', 'url': host})
                    for _ in answers:
                        ips_to_scan.append(_.address)
        except Exception as e:
            print_msg('Invalid domain: %s' % host)
Example #9
def domain_lookup():
    r = Resolver()
    r.timeout = r.lifetime = 8.0
    while True:
        try:
            host = q_hosts.get(timeout=0.1)
        except:
            break
        _schema, _host, _path = parse_url(host)
        try:
            m = re.search(r'\d+\.\d+\.\d+\.\d+', _host.split(':')[0])
            if m:
                q_targets.put({'file': '', 'url': host})
                ips_to_scan.append(m.group(0))
            else:
                answers = r.query(_host.split(':')[0])
                if answers:
                    q_targets.put({'file': '', 'url': host})
                    for _ in answers:
                        ips_to_scan.append(_.address)
        except Exception, e:
            lock.acquire()
            print '[%s][Warning] Invalid domain: %s' % (get_time(), host)
            lock.release()
Example #10
            STOP_ME = False
            threading.Thread(target=save_report_thread, args=(q_results, file)).start()
            print '[%s] Report thread created, prepare target Queue...' % get_time()

            if args.crawler:
                _input_files = glob.glob(args.crawler + '/*.log')
                for _file in _input_files:
                    q_targets.put({'file': _file, 'url': ''})

            if args.host or args.f or args.d:
                for line in lines:
                    if line.strip():
                        hosts = line.strip().split()
                        for host in hosts:
                            host = host.strip(',')    # Work with https://github.com/lijiejie/subDomainsBrute
                            _schema, _host, _path = parse_url(host)
                            try:
                                ip = socket.gethostbyname(_host.split(':')[0])
                                if ip:
                                    scanned_ips.append(ip)
                                    q_targets.put({'file': '', 'url': host})
                            except Exception, e:
                                pass

                if args.network != 32:
                    for ip in scanned_ips:
                        if ip.find('/') > 0:
                            continue
                        _network = u'%s/%s' % ('.'.join( ip.split('.')[:3] ), args.network)
                        if _network in scanned_ips:
                            continue
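The "!= 32" check above suggests args.network is a CIDR prefix length, with 32 meaning "scan only the exact IPs"; for a wider prefix, each resolved IP is collapsed into a network string. A small illustration of that string manipulation (the values are made up):

# Illustrative only: how the _network string is built from a resolved IP.
ip = '192.168.1.23'
network_bits = 24                                  # stand-in for args.network
_network = u'%s/%s' % ('.'.join(ip.split('.')[:3]), network_bits)
print _network                                     # 192.168.1/24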
Example #11
            )

            if args.crawler:
                _input_files = glob.glob(args.crawler + '/*.log')
                for _file in _input_files:
                    q_targets.put({'file': _file, 'url': ''})

            if args.host or args.f or args.d:
                for line in lines:
                    if line.strip():
                        hosts = line.strip().split()
                        for host in hosts:
                            host = host.strip(
                                ','
                            )  # Work with https://github.com/lijiejie/subDomainsBrute
                            _schema, _host, _path = parse_url(host)
                            try:
                                ip = socket.gethostbyname(_host.split(':')[0])
                                if ip:
                                    scanned_ips.append(ip)
                                    q_targets.put({'file': '', 'url': host})
                            except Exception, e:
                                pass

                if args.network != 32:
                    for ip in scanned_ips:
                        if ip.find('/') > 0:
                            continue
                        _network = u'%s/%s' % ('.'.join(
                            ip.split('.')[:3]), args.network)
                        if _network in scanned_ips:
Example #12
 def init_from_url(self, url):
     self.reset_scanner()
     self.url = 'http://' + url if url.find('://') < 0 else url
     self.schema, self.host, self.path = parse_url(url)
     self.domain_sub = get_domain_sub(self.host)
     self.init_final()
Example #13
    def init_final(self):
        try:
            if self.conn_pool:
                self.conn_pool.close()
        except Exception as e:
            pass
        default_port = 443 if self.schema.lower() == 'https' else 80
        self.host, self.port = self.host.split(
            ':') if self.host.find(':') > 0 else (self.host, default_port)
        self.port = int(self.port)
        if (self.schema == 'http' and self.port == 80) or (self.schema == 'https' and self.port == 443):
            self.base_url = '%s://%s' % (self.schema, self.host)
        else:
            self.base_url = '%s://%s:%s' % (self.schema, self.host, self.port)

        is_port_open = self.is_port_open()
        if is_port_open:
            if self.schema == 'https':
                self.conn_pool = HTTPSConnPool(self.host,
                                               port=self.port,
                                               maxsize=self.args.t * 2,
                                               headers=HEADERS)
            else:
                self.conn_pool = HTTPConnPool(self.host,
                                              port=self.port,
                                              maxsize=self.args.t * 2,
                                              headers=HEADERS)
                # 301 redirect to https
                status, headers, html_doc = self.http_request('/')
                location = headers.get('Location', '')
                if status == 301 and location.startswith('https://'):
                    self.base_url = location.rstrip('/')
                    _, loc_host, _ = parse_url(location)
                    port = int(loc_host.split(':')
                               [1]) if loc_host.find(':') > 0 else 443
                    self.conn_pool = HTTPSConnPool(self.host,
                                                   port=port,
                                                   maxsize=self.args.t * 2,
                                                   headers=HEADERS)
                    print_msg('301 redirect: %s' % location)

        if self.args.scripts_only or (not is_port_open
                                      and not self.args.no_scripts):
            for _ in self.user_scripts:
                self.url_queue.put((_, '/'))
            print_msg('Scan with scripts: %s' % self.host)
            return

        if not is_port_open:
            return

        self.max_depth = cal_depth(self, self.path)[1] + 5
        if self.args.no_check404:
            self._404_status = 404
            self.has_status_404 = True
        else:
            self.check_404_existence()
        if self._404_status == -1:
            print_msg('[Warning] HTTP 404 check failed <%s:%s>' %
                      (self.host, self.port))
        elif not self.has_status_404:
            print_msg('[Warning] %s has no HTTP 404.' % self.base_url)
        _path, _depth = cal_depth(self, self.path)
        self.enqueue('/')
        self.enqueue(_path)
        if not self.args.no_crawl and not self.log_file:
            self.crawl(_path)
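The port and base-URL handling at the top of init_final is the part most worth isolating; the standalone sketch below reproduces just that logic outside the class (the function name split_host_port is mine, not from the original code):

def split_host_port(host, schema):
    # Default port follows the schema; an explicit ':port' in host overrides it.
    default_port = 443 if schema.lower() == 'https' else 80
    if host.find(':') > 0:
        host, port = host.split(':')
    else:
        port = default_port
    port = int(port)
    # Omit the port from base_url only when it is the schema's default.
    if (schema == 'http' and port == 80) or (schema == 'https' and port == 443):
        base_url = '%s://%s' % (schema, host)
    else:
        base_url = '%s://%s:%s' % (schema, host, port)
    return host, port, base_url

print split_host_port('www.example.com:8443', 'https')
# ('www.example.com', 8443, 'https://www.example.com:8443')
print split_host_port('www.example.com', 'http')
# ('www.example.com', 80, 'http://www.example.com')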