示例#1
0
 def up():
     """Increment the shared error counter; warn once when 70% of the limit is hit."""
     ErrorsCounter.counter += 1
     if not ErrorsCounter.reported:
         limit = int(Registry().get('config')['main']['errors_limit'])
         if ErrorsCounter.counter / (limit / 100) > 70:
             Registry().get("logger").log(
                 "\nAttention! Too many errors, 70% of limit!")
             ErrorsCounter.reported = True
示例#2
0
    def _get_extensions(self):
        """ Build files extensions list.

        Groups spidered URLs from MongoDB by path, collects every path
        suffix after the last dot (at most 5 chars including the dot),
        and maps each extension to the list of URLs that carry it.
        :return: dict {'.ext': ['/path?query' or '/path', ...]}
        """
        result = {}
        coll = Registry().get('mongo').spider_urls
        # Legacy mongo group(): one row per distinct 'path'.
        links = coll.group({'path': True}, '', {}, 'function () {}')
        links = mongo_result_to_list(links)

        exts = []
        for link in links:
            # Treat the tail after the last '.' as an extension only when it
            # is short enough (<= 5 chars with the dot, e.g. '.html').
            if link['path'].rfind('.') > -1 and len(
                    link['path']) - link['path'].rfind('.') <= 5:
                exts.append(link['path'][link['path'].rfind('.'):])

        for ext in list(set(exts)):
            if ext not in result:
                result[ext] = []

            # '\\' + ext escapes the leading dot for the regexp anchor.
            links = coll.find({'path': re.compile('\\' + ext + '$')})
            links = mongo_result_to_list(links)

            for link in links:
                result[ext].append(
                    link['path'] + '?' +
                    link['query'] if link['query'] else link['path'])

        return result
示例#3
0
    def add_action(self):
        """ Action add of module: add a URL to the current project's host.

        Raises WSException when the exact URL already exists for the host.
        When only the slash-toggled variant exists ('/a' vs '/a/'), asks
        the operator for confirmation before adding.
        """
        pid = Registry().get('pData')['id']
        self.validate_main()

        if self.model.exists(pid, self.options['host'].value,
                             self.options['url'].value):
            raise WSException(
                "URL '{0}' already exists in this project in host '{1}'!".
                format(self.options['url'].value, self.options['host'].value))

        host_id = HostsModel().get_id_by_name(pid, self.options['host'].value)

        # Check the slash-toggled twin of the URL (with/without trailing '/').
        if (self.options['url'].value[-1] == '/' and
                self.model.exists(pid, self.options['host'].value, self.options['url'].value[:-1])) or\
            (self.options['url'].value[-1] != '/' and
             self.model.exists(pid, self.options['host'].value, self.options['url'].value + "/")):
            if raw_input(
                    'Url {0} have analogue in database (with or without end slash). '
                    'Are you realy want to add it (y/n)?'.format(
                        self.options['url'].value)).lower()[0] != 'y':
                print "Url {0} was not added!".format(
                    self.options['url'].value)
                return

        self.model.add(Registry().get('pData')['id'], host_id,
                       self.options['url'].value)

        print " URL '{0}' successfully added to host '{1}'".\
              format(self.options['url'].value, self.options['host'].value)
示例#4
0
 def q(self, sql, return_curs=False):
     """ Usual query, return cursor.

     Reconnects and redoes the whole call when the MySQL connection is
     lost; retries up to self._restart_by_deadlock_limit times on
     deadlock / lock-wait errors (the last attempt lets errors raise).
     :param sql: SQL statement to execute
     :param return_curs: when True return the open cursor, else close it
     """
     for i in range(1, self._restart_by_deadlock_limit + 1):
         try:
             curs = self._db.cursor(buffered=True)
             curs.execute(sql)
         except mysql.connector.errors.OperationalError as ex:
             if "MySQL Connection not available" in str(ex):
                 # Connection died: reconnect and replay the whole call.
                 self.connect()
                 Registry().get('logger').log(
                     "database", "Reconnect on '{0}'".format(sql))
                 return self.q(sql, return_curs)
             else:
                 raise ex
         except mysql.connector.errors.DatabaseError as e:
             if str(e).count("Lock wait timeout exceeded") or str(e).count(
                     "Deadlock found when trying to get lock"):
                 Registry().get('logger').log(
                     "database", "Deadlock '{0}', try {1} ".format(sql, i))
                 if i == self._restart_by_deadlock_limit:
                     # Final attempt: run unbuffered, let any error propagate.
                     curs = self._db.cursor()
                     curs.execute(sql)
                 else:
                     time.sleep(self._sleep_by_deadlock_restart)
                     continue
             else:
                 raise e
         break
     if return_curs:
         return curs
     else:
         curs.close()
示例#5
0
    def __init__(
            self, queue, protocol, host, url, method, mask_symbol, not_found_re,
            not_found_codes, retest_codes, delay, counter, result):
        """Set up one brute-force worker thread from the module options."""
        threading.Thread.__init__(self)
        self.retested_words = {}

        self.queue = queue
        self.protocol = protocol.lower()
        self.host = host
        self.url = url
        self.mask_symbol = mask_symbol
        self.counter = counter
        self.result = result
        self.done = False

        # HEAD carries no body, so fall back to GET when a body regexp is set.
        body_check_needs_get = len(not_found_re) and method.lower() == 'head'
        self.method = 'get' if body_check_needs_get else method
        self.not_found_re = re.compile(not_found_re) if len(not_found_re) else False

        codes = not_found_codes.split(',')
        codes.append('404')
        self.not_found_codes = list(set(codes))
        self.retest_codes = list(set(retest_codes.split(','))) if len(retest_codes) else []

        self.delay = int(delay)

        # Each thread gets its own HTTP client copy.
        self.http = copy.deepcopy(Registry().get('http'))
        self.logger = Registry().get('logger')
示例#6
0
 def factory(_all=0):
     """ Build counter object by WS config.

     :param _all: total number of expected items (passed to WSCounter)
     :return: configured WSCounter
     """
     # Read the config section once instead of three separate Registry walks.
     main_conf = Registry().get('config')['main']
     step = int(main_conf['counter_step'])
     return WSCounter(
         step, step * int(main_conf['counter_steps_for_new_string']), _all)
示例#7
0
    def __init__(self, queue, domain, protocol, method, not_found_re, not_found_codes,
                 not_found_size, delay, counter, result):
        """Set up one backups-finder worker thread.

        Bug fix: the original assigned ``self.method = method.lower()`` and
        then immediately clobbered it with the un-lowered ``method`` when no
        GET fallback applied — the lowering was plainly intended but dead.
        """
        super(BackupsFinderThread, self).__init__()
        self.queue = queue
        self.domain = domain
        self.result = result
        self.counter = counter
        self.protocol = protocol
        self.not_found_re = False if not len(not_found_re) else re.compile(not_found_re)
        self.not_found_size = int(not_found_size)

        # HEAD carries no body: force GET when a body check (regexp or size)
        # is required; keep the method lower-cased in both branches.
        if (len(not_found_re) or self.not_found_size != -1) and method.lower() == 'head':
            self.method = 'get'
        else:
            self.method = method.lower()

        not_found_codes = not_found_codes.split(',')
        not_found_codes.append('404')
        self.not_found_codes = list(set(not_found_codes))

        self.delay = int(delay)

        self.done = False
        self.http = Registry().get('http')
        self.logger = Registry().get('logger')
示例#8
0
 def __init__(self):
     """Build the tag -> attribute map declared in the spider config."""
     self.tags = {}
     raw_tags = Registry().get('config')['spider']['tags']
     for entry in raw_tags.split(","):
         parts = entry.split("|")
         self.tags[parts[0]] = parts[1]
示例#9
0
 def __init__(self):
     """Parse the 'tag|attribute' pairs from config into self.tags."""
     self.tags = {}
     conf_value = Registry().get('config')['spider']['tags']
     for pair in conf_value.split(","):
         pieces = pair.split("|")
         self.tags[pieces[0]] = pieces[1]
示例#10
0
 def is_critical_stop(self):
     """
     Do we have a critical sign that demands an immediate stop?
     (too many dead proxies, positive-results limit, or errors limit)
     :return: truthy value of the first matched sign, else falsy
     """
     return Registry().get('proxy_many_died') or Registry().get(
         'positive_limit_stop') or ErrorsCounter.is_limit()
示例#11
0
    def load_file(self, _file):
        """ Fill queue from text file.

        Drops the collection, inserts one row per line (lines that fail
        unicode() are logged and skipped; blank lines are skipped when
        self.skip_blank_rows is set) and indexes the 'name' field.
        :param _file: path of the dictionary file to load
        :return: resulting collection size
        """
        self.collection.drop()

        fh = open(_file)

        last = "START OF FILE"
        while True:
            line = fh.readline()
            if not line:
                break
            if not line.strip() and self.skip_blank_rows:
                continue

            try:
                line = line.strip()
                # Probe for undecodable bytes before inserting into Mongo.
                unicode(line)
                self.collection.insert(self.build_row(line))
            except UnicodeDecodeError:
                _str = " UNICODE ERROR: In file '{0}' skip word '{1}', after word '{2}' !".format(_file, line, last)
                if Registry().isset('logger'):
                    Registry().get('logger').log(_str)
                else:
                    print _str
                continue

            last = line

        fh.close()

        self.collection.create_index('name', drop_dups=True, unique=self.unique)

        self.load_data()

        return self.collection.count()
示例#12
0
文件: Http.py 项目: Sts0mrg0/ws-cli
    def head(self, url, verify=None, allow_redirects=None, headers=None):
        """ HTTP HEAD request.

        Falls back to instance defaults for verify/allow_redirects/headers.
        :return: requests response, or None when the declared
                 Content-Length exceeds config main.max_size (the
                 oversize is recorded in self.errors['maxsize']).
        """
        if self.every_request_new_session:
            self.session = requests.Session()
        verify = self.verify if verify is None else verify
        allow_redirects = self.allow_redirects if allow_redirects is None else allow_redirects
        headers = self.headers if headers is None else headers

        resp = self.session.head(url,
                                 verify=verify,
                                 allow_redirects=allow_redirects,
                                 headers=headers,
                                 proxies=self.get_current_proxy())

        # Read the size limit from config once instead of twice.
        max_size = Registry().get('config')['main']['max_size']
        if 'content-length' in resp.headers and \
                int(resp.headers['content-length']) > int(max_size):
            self.errors['maxsize'].append(
                "URL {0} has size {1} bytes, but limit in config - {2} bytes".\
                format(
                    url,
                    resp.headers['content-length'],
                    max_size
                )
            )
            resp = None
        return resp
示例#13
0
    def _options_to_registry(self):
        """Push spider module options and config values into the Registry."""
        if self.options['ignore'].value:
            Registry().set('ignore_regexp',
                           re.compile(self.options['ignore'].value))

        # Build one case-insensitive regexp matching any allowed extension.
        expr = ''
        for ext in Registry().get('config')['spider']['allow_exts'].split(','):
            expr += r'\.' + ext.strip() + '$|'
        expr = expr.rstrip('|')
        Registry().set('allow_regexp', re.compile(expr, re.I))

        # 'only_one' is a '||'-separated list of regexps.
        # NOTE(review): 'block' presumably marks a regexp already consumed
        # by the spider — confirm against the consumer of 'only_one'.
        if self.options['only_one'].value:
            tmp = self.options['only_one'].value.split("||")
            if len(tmp):
                only_one = []
                for regex in tmp:
                    only_one.append({'regex': regex, 'block': False})
                Registry().set('only_one', only_one)

        denied_schemes = parse_split_conf(
            Registry().get('config')['spider']['denied_schemes'])
        Registry().set('denied_schemes', denied_schemes)
        Registry().get('http').set_allowed_types(
            parse_split_conf(
                Registry().get('config')['spider']['scan_content_types']))
        Registry().get('http').set_denied_types(
            parse_split_conf(
                Registry().get('config')['spider']['noscan_content_types']))
示例#14
0
    def load_dict(self, dict_for_load, drop=True):
        """ Fill collection from dict """
        if drop:
            self.collection.drop()

        counter = 0
        last = "START OF FILE"

        for line in dict_for_load:
            try:
                line = line.strip()
                unicode(line)
                self.collection.insert(self.build_row(line))
            except UnicodeDecodeError:
                _str = " UNICODE ERROR: In file '{0}' skip word '{1}', after word '{2}' !".format(file, line, last)
                if Registry().isset('logger'):
                    Registry().get('logger').log(_str)
                else:
                    print _str

                continue

            counter += 1
            last = line

        self.load_data()

        return counter
示例#15
0
 def list_action(self):
     """ Action list of module: print an ASCII table of hosts.

     With the 'ip' option set, lists hosts bound to that IP (2 columns);
     otherwise lists every host of the current project (3 columns).
     """
     if self.options['ip'].value:
         print "{0:=^51}".format("")
         print "|{0: ^49}|".format("Hosts for IP '{0}'".format(
             self.options['ip'].value))
         print "{0:=^51}".format("")
         print "| {0: ^23}| {1: ^23}|".format('Title', 'Description')
         print "{0:=^51}".format("")
         for host in self.model.list(Registry().get('pData')['id'],
                                     self.options['ip'].value):
             print "| {0: <23}| {1: <23}|".format(host['name'],
                                                  host['descr'])
         print "{0:=^51}".format("")
     else:
         print "{0:=^76}".format("")
         print "|{0: ^74}|".format("All host for project '{0}'".format(
             Registry().get('pData')['name']))
         print "{0:=^76}".format("")
         print "| {0: ^23}| {1: ^23}| {2: ^23}|".format(
             'Title', 'Description', 'IP')
         print "{0:=^76}".format("")
         for host in self.model.list_without_ip(
                 Registry().get('pData')['id']):
             print "| {0: <23}| {1: <23}| {2: <23}|".format(
                 host['name'], host['descr'], host['ip'])
         print "{0:=^76}".format("")
示例#16
0
    def load_file_in_db(self, file_path):
        """
        Import in-db file in database with mysqlimport util
        :param file_path: data file with hashlist_id,hash,salt,summ columns
                          (per the --columns flag below)
        :return:
        """
        self.update_status('putindb')
        self.log("Data go to DB")

        # mysqlimport derives the target table name from the file name,
        # so the data must be staged as '<tmp_dir>/hashes'.
        if os.path.exists(self.tmp_dir + "/hashes"):
            os.remove(self.tmp_dir + "/hashes")

        hashes_file_path = self.tmp_dir + "/hashes"
        shutil.move(file_path, hashes_file_path)

        # NOTE(review): shell=True with the DB password interpolated into the
        # command line exposes it in the process list, and is shell-injection
        # prone if any config value ever contains shell metacharacters.
        importcmd = "mysqlimport --lock-tables --user {0} -p{1} --local " \
                    "--columns hashlist_id,hash,salt,summ --fields-enclosed-by '\"'" \
                    " --fields-terminated-by ',' --fields-escaped-by \"\\\\\" {2} {3}"\
                    .format(
                        Registry().get('config')['main']['mysql_user'],
                        Registry().get('config')['main']['mysql_pass'],
                        Registry().get('config')['main']['mysql_dbname'],
                        self.tmp_dir + "/hashes"
                    )

        subprocess.check_output(importcmd, shell=True)

        os.remove(hashes_file_path)
示例#17
0
 def add(self,
         pid,
         host_id,
         url,
         referer='',
         response_code=0,
         response_time=0,
         who_add='human',
         spidered=0,
         size=0,
         descr=''):
     """ Add url to table.

     :param pid: project id
     :param host_id: host id the url belongs to
     :param url: url path (hashed with md5 for the 'hash' column)
     :param who_add: origin of the record ('human', 'spider', ...)
     :return: insert result, or None when the insert failed (the error
              is logged or printed instead of propagating)
     """
     try:
         return self._db.insert(
             "urls", {
                 "project_id": pid,
                 "host_id": host_id,
                 "hash": md5(url),
                 "url": url,
                 "referer": referer,
                 "response_code": response_code,
                 "response_time": response_time,
                 "when_add": int(time.time()),
                 "who_add": who_add,
                 "spidered": spidered,
                 "size": size,
                 "descr": descr
             }, 1)
     except BaseException as e:
         if Registry().isset('logger'):
             Registry().get('logger').ex(e)
         else:
             print "Can`t add url: " + str(e)
示例#18
0
文件: Pre.py 项目: Sts0mrg0/ws-cli
    def check_backups(self):
        """ Simple check backups: probe well-known backup names over HTTP. """
        http = Registry().get('http')
        bases = Registry().get('wr_path') + "/bases/"

        dirs = file_to_list(bases + "pre-backups-dirs.txt")
        dirs.append(self.options['host'].value)
        files = file_to_list(bases + "pre-backups-files.txt")
        dirs_exts = file_to_list(bases + "bf-dirs.txt")
        files_exts = file_to_list(bases + "bf-files.txt")

        # Expand every name through its '|name|' templates.
        check = [tpl.replace('|name|', name)
                 for name in dirs for tpl in dirs_exts]
        check += [tpl.replace('|name|', name)
                  for name in files for tpl in files_exts]

        result = []
        for url in check:
            resp = http.get(self.root_url + url)
            if not self._response_404(resp):
                result.append({'url': self.root_url + url, 'code': resp.status_code})

        return result
示例#19
0
    def __init__(self, queue, domain, protocol, method, not_found_re, delay,
                 ddos_phrase, ddos_human, recreate_re, counter, result):
        """Set up a selenium-based backups finder thread and open a browser.

        Registers the target URL for proxy checking and creates the
        browser at the end (browser_create may do I/O).
        """
        super(SBackupsFinderThread, self).__init__()
        self.queue = queue
        # HEAD carries no body, so fall back to GET when a body regexp is set.
        self.method = method if not (len(not_found_re)
                                     and method.lower() == 'head') else 'get'
        self.domain = domain
        self.result = result
        self.counter = counter
        self.protocol = protocol
        self.not_found_re = False if not len(not_found_re) else re.compile(
            not_found_re)
        self.done = False
        self.http = Registry().get('http')
        self.delay = int(delay)
        self.ddos_phrase = ddos_phrase
        self.ddos_human = ddos_human
        # Pages matching this regexp force a browser re-creation (see run()).
        self.recreate_re = False if not len(recreate_re) else re.compile(
            recreate_re)

        self.logger = Registry().get('logger')

        Registry().set('url_for_proxy_check',
                       "{0}://{1}".format(protocol, domain))

        self.browser_create()
示例#20
0
    def links_in_spider_base(pid, host):
        """ Put found links in MySQL.

        Pages through the MongoDB spider_urls collection in batches of
        links_per_time_limit, mass-inserting the urls and then mass-updating
        their spidered/code/time/size fields.
        :param pid: project id
        :param host: host name
        """
        links_per_time_limit = 50
        # Progress counter sized by the expected number of batches.
        c = WSCounter(
            1, 60,
            int(Registry().get('mongo').spider_urls.count() /
                links_per_time_limit))
        Urls = UrlsModel()
        host_id = HostsModel().get_id_by_name(pid, host)
        urls_add = []

        skip = 0
        while True:
            links = mongo_result_to_list(
                Registry().get('mongo').spider_urls.find().skip(skip).limit(
                    links_per_time_limit))

            for link in links:
                url = link['path'] + '?' + link['query'] if len(
                    link['query']) else link['path']
                urls_add.append({
                    'url': url,
                    'referer': link['referer'],
                    'response_code': link['code'],
                    'response_time': link['time'],
                    'size': link['size'],
                    'who_add': 'spider',
                    'spidered': link['checked']
                })
            Urls.add_mass(pid, host_id, urls_add)

            urls_add = []

            # Second pass over the same batch: collect per-field updates.
            to_update = {'spidered': [], 'code': [], 'time': [], 'size': []}

            for link in links:
                url = link['path'] + '?' + link['query'] if len(
                    link['query']) else link['path']
                if link['checked']:
                    to_update['spidered'].append({'url': url, 'value': 1})
                to_update['code'].append({'url': url, 'value': link['code']})
                to_update['time'].append({'url': url, 'value': link['time']})
                to_update['size'].append({'url': url, 'value': link['size']})

            Urls.update_url_field_mass(pid, host, 'spidered',
                                       to_update['spidered'])
            Urls.update_url_field_mass(pid, host, 'response_code',
                                       to_update['code'])
            Urls.update_url_field_mass(pid, host, 'response_time',
                                       to_update['time'])
            Urls.update_url_field_mass(pid, host, 'size', to_update['size'])

            skip += len(links)

            c.up()

            # A short batch means the collection is exhausted.
            if len(links) < links_per_time_limit:
                break
示例#21
0
class DnsBruteThread(threading.Thread):
    """ Thread class for DnsBrute* modules """
    # Completion flag readable by the coordinating module.
    done = False

    def __init__(self, queue, domain, proto, msymbol, dns_srv, delay, result,
                 counter):
        """Set up one DNS brute-force worker.

        :param domain: domain template containing msymbol as placeholder
        :param proto: dns.query transport name ('udp'/'tcp')
        :param msymbol: mask symbol replaced by each candidate word
        """
        threading.Thread.__init__(self)
        self.queue = queue
        self.domain = domain
        self.proto = proto
        self.dns_srv = dns_srv
        self.counter = counter
        self.msymbol = msymbol
        self.result = result
        self.delay = int(delay)
        self.done = False
        self.logger = Registry().get('logger')

    def run(self):
        """ Run thread: resolve candidate names until the queue is empty. """
        ip_re = re.compile(r"(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
        # Extracts the ANSWER section from the textual DNS response.
        ns_resp_re = re.compile(r";ANSWER\s(?P<data>(.|\s)*)\s;AUTHORITY",
                                re.M)

        # dns.query.udp / dns.query.tcp chosen by configured protocol.
        req_func = getattr(dns.query, self.proto.lower())

        while True:
            host = None
            if self.delay:
                time.sleep(self.delay)
            try:
                host = self.queue.get()

                self.counter.up()
                check_name = self.domain.replace(self.msymbol, host)
                query = dns.message.make_query(check_name, 'A')
                result = req_func(query, self.dns_srv, timeout=5)
                response = ns_resp_re.search(result.to_text())
                if response is not None:
                    # Record only the first A-record IP per name.
                    for ip in ip_re.findall(response.group('data')):
                        self.result.append({
                            'name': check_name,
                            'ip': ip,
                            'dns': self.dns_srv
                        })
                        break

                if len(self.result) >= int(Registry().get('config')['main']
                                           ['positive_limit_stop']):
                    Registry().set('positive_limit_stop', True)

            except Queue.Empty:
                self.done = True
                break

            except BaseException as e:
                # Log and back off, but keep the worker alive.
                self.logger.ex(e)
                time.sleep(5)
    def __init__(self):
        """Thread that reads its retest limits from the main config section."""
        threading.Thread.__init__(self)

        self.logger = Registry().get('logger')

        main_conf = Registry().get('config')['main']
        self.retest_limit = int(main_conf['retest_limit'])
        self.retest_delay = int(main_conf['retest_delay'])
    def test_is_critical_stop(self, proxy_many_died, positive_limit_stop,
                              errors_limit, expected):
        """Parametrized check of is_critical_stop() sign combinations."""
        Registry().set('config', {"main": {"errors_limit": "10"}})
        Registry().set('proxy_many_died', proxy_many_died)
        Registry().set('positive_limit_stop', positive_limit_stop)
        # 100 errors against a limit of 10 guarantees the errors-limit sign.
        ErrorsCounter.counter = 100 if errors_limit else 0

        module = ModuleMock(False)
        assert expected == module.is_critical_stop()
示例#24
0
 def get_denied_schemas():
     """ Get list of denied schemas (built lazily, cached on the class). """
     if SpiderCommon.denied_schemas is None:
         raw = Registry().get('config')['spider']['denied_schemes'].split(',')
         # Fix: the old loop located each item with list.index(), which always
         # returns the FIRST occurrence — duplicated scheme names kept
         # re-encoding slot 0. A comprehension encodes every slot correctly
         # and avoids the accidental O(n^2) scan.
         SpiderCommon.denied_schemas = [
             schema.encode('utf8').strip() for schema in raw
         ]
     return SpiderCommon.denied_schemas
示例#25
0
 def links_in_database(pid, host):
     """ Method for insert all found links in MySQL in work end """
     logger = Registry().get('logger')
     logger.log("\nInsert links in DB..." +
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
     SpiderCommon.links_in_spider_base(pid, host)
     logger.log("\nInsert links in DB (base)..." +
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
     SpiderCommon.links_in_urls_base(pid, host)
示例#26
0
 def get_denied_schemas():
     """ Get list of denied schemas, cached on SpiderCommon after first build. """
     if SpiderCommon.denied_schemas is None:
         schemas = Registry().get(
             'config')['spider']['denied_schemes'].split(',')
         # Fix: list.index() inside the loop returns the first match only,
         # so duplicate scheme names overwrote the same element. Encode and
         # strip each slot directly instead.
         SpiderCommon.denied_schemas = [
             s.encode('utf8').strip() for s in schemas
         ]
     return SpiderCommon.denied_schemas
示例#27
0
 def is_response_length_less_than_limit(self, url, resp):
     """ Checking response length by Content-Length header.

     :return: False (and record a 'maxsize' error) when the declared
              Content-Length exceeds config main.max_size, else True.
     """
     # Read the config limit once instead of twice.
     max_size = Registry().get('config')['main']['max_size']
     if 'content-length' in resp.headers and \
             int(resp.headers['content-length']) > int(max_size):
         self.errors['maxsize'].append(
             "URL {0} has size {1} bytes, but limit in config - {2} bytes".
             format(url, resp.headers['content-length'], max_size))
         return False
     return True
示例#28
0
    def prepare_first_pages(host):
        """ Prepare link on first page in MongoDB. Add root url if urls for this host not exists.

        Rebuilds the spider_urls collection from the project's known URLs
        and indexes it by hash (unique) and by 'checked'.
        :param host: host name within the current project
        """
        pid = Registry().get('pData')['id']

        coll = Registry().get('mongo').spider_urls
        coll.drop()

        Urls = UrlsModel()
        urls = Urls.list_by_host_name_for_spider(pid, host)
        if not len(urls):
            # No URLs yet: seed the spider with the site root.
            Registry().get('logger').log("Spider: Root URL was added automaticaly")
            Urls.add(
                pid, HostsModel().get_id_by_name(pid, host), '/', who_add='spider'
            )
            urls = Urls.list_by_host_name_for_spider(pid, host)

        for url in urls:
            url = urlparse(url['url'])
            data = {
                'hash': md5(str(url.path + url.query)),
                'path': url.path,
                'query': url.query,
                'time': 0,
                'code':0,
                'checked': 0,
                'getted' : 0,
                'referer': '',
                'size': 0,
                'founder': 'spider'
            }

            coll.insert(data)

        coll.create_index([('hash', 1)], unique=True, dropDups=True)
        coll.create_index([('checked', 1)])
示例#29
0
文件: Pre.py 项目: Sts0mrg0/ws-cli
    def check_dafs(self, _type):
        """ Simple dafs search: probe every word of the pre-dafs base via HTTP.

        :param _type: base suffix, selects bases/pre-dafs-<_type>.txt
        :return: list of {'url', 'code', 'time'} dicts for non-404 hits
        """
        Http = Registry().get('http')
        result = []
        # Fix: the old code leaked the open file handle; close it deterministically.
        with open(Registry().get('wr_path') + '/bases/pre-dafs-{0}.txt'.format(_type), 'r') as fh:
            _dict = fh.read().split("\n")
        for obj in _dict:
            r = Http.get(self.root_url + obj)
            if r is not None and not self._response_404(r):
                result.append({'url': "/" + obj, 'code': r.status_code, 'time': 0})

        return result
示例#30
0
 def __init__(self, job, host, src, delay, counter):
     """Spider worker thread bound to a single job/host pair."""
     WSThread.__init__(self, None, None)
     self.job = job
     self.host = host
     self.src = src
     self.delay = int(delay)
     self.counter = counter
     self.links_parser = SpiderLinksParser()
     self.http = Registry().get('http')
     self._db = Registry().get('mongo')
 def check_positive_limit_stop(self, result, rate=1):
     """
     Do we already have too many positive results?
     Sets the 'positive_limit_stop' registry flag when the threshold is hit.
     :param result: list of positive results collected so far
     :param rate: multiplier applied to the configured limit
     :return:
     """
     limit = int(Registry().get('config')['main']['positive_limit_stop']) * rate
     if len(result) >= limit:
         Registry().set('positive_limit_stop', True)
示例#32
0
    def parse_links(self, content_type, content, link):
        """
        Method parse a page content and extract links from it. Can parse xml, html and css.
        Other content parse how plain text
        :param content_type: type of content (text/xml, application/xml, text/html, text/css or other one)
        :param content: content of page
        :param link: url of current page (dict with 'path' and 'query')
        :return: list of unique links
        """
        try:
            if not len(content.strip()):
                return []

            # Pick the structural parser. Every branch also ran the plain-text
            # regexp parser, and the 'text/html' and 'else' branches were
            # byte-identical — both duplications are merged here.
            if content_type in ['text/xml', 'application/xml'
                                ] or content[:6] == '<?xml ':
                links = self.parse_links_xml(content)
            elif content_type == 'text/css':
                links = self.parse_links_css(content)
            else:
                # text/html and any unknown type use the HTML regexp parser.
                links = self.parse_links_html_re(content)
            links.extend(self.parse_links_text_re(content))

            if Registry().isset('ignore_regexp'):
                links = self._clear_ignore(links)

            if Registry().isset('only_one'):
                links = self._clear_only_one(links)

            if Registry().get('config')['spider']['denied_schemes']:
                links = self._clear_by_schema(links)

        except etree.XMLSyntaxError:
            # Malformed XML: degrade to plain-text link extraction.
            links = self.parse_links_text_re(content)
            Registry().get('logger').log(
                " Document syntax error {0}, parsing as text".format(
                    link['path'] + '?' +
                    link['query'] if len(link['query']) else link['path']))
        except etree.DocumentInvalid:
            links = self.parse_links_text_re(content)
            Registry().get('logger').log(
                " Document invalid {0}, parsing as text".format(
                    link['path'] + '?' +
                    link['query'] if len(link['query']) else link['path']))

        return list(set(links))
示例#33
0
文件: Pre.py 项目: Sts0mrg0/ws-cli
    def _insert_infos(self, result):
        """ Insert found infos in db.

        :param result: dict of info-key -> value; non-empty values are
                       stored as JSON, empty ones as ''.
        """
        h_id = HostsModel().get_id_by_name(Registry().get('pData')['id'],
                                           self.options['host'].value)
        HI = HostsInfoModel()

        for k in result:
            HI.set_info(
                Registry().get('pData')['id'], h_id, k,
                json.dumps(result[k])
                if result[k] and len(str(result[k])) else '')
示例#34
0
    def run(self):
        """ Run thread: fetch candidate URLs in a browser until the queue is empty.

        A word is a hit when the page does NOT match self.not_found_re.
        The browser is recreated on recreate_re matches, timeouts and
        refused connections (the word is re-queued in the latter two cases).
        """
        while not self.done:
            # Timestamp used elsewhere for stuck-thread detection.
            self.last_action = int(time.time())

            if self.delay:
                time.sleep(self.delay)

            try:
                word = self.queue.get()
                self.counter.up()

                url = "{0}://{1}{2}".format(self.protocol, self.domain, word)

                self.browser.get(url)

                # Page content demanding a fresh browser (e.g. anti-bot page).
                if self.recreate_re and self.recreate_re.findall(
                        self.browser.page_source):
                    #self.queue.task_done(word)
                    #self.queue.put(word)
                    self.browser_close()
                    self.browser_create()
                    continue

                if not self.not_found_re.findall(self.browser.page_source):
                    self.result.append(word)

                self.logger.item(word, self.browser.page_source, True)

                if len(self.result) >= int(Registry().get('config')['main']
                                           ['positive_limit_stop']):
                    Registry().set('positive_limit_stop', True)

                #self.queue.task_done(word)
            except Queue.Empty:
                self.done = True
                break
            except TimeoutException as e:
                # Timed out: re-queue the word and restart the browser.
                self.queue.put(word)
                self.browser_close()
                self.browser_create()
                continue
            except BaseException as e:
                #self.queue.task_done(word)
                if not str(e).count('Timed out waiting for page load'):
                    self.logger.ex(e)
                if str(e).count("Connection refused"):
                    self.queue.put(word)
                    self.browser_close()
                    self.browser_create()
            self.up_requests_count()

        self.browser_close()
示例#35
0
 def __init__(self, queue, domain, protocol, method, delay, counter, result):
     """Worker thread state; loads the bad-words base once per thread."""
     threading.Thread.__init__(self)
     self.queue = queue
     self.domain = domain
     self.protocol = protocol
     self.method = method.lower()
     self.counter = counter
     self.result = result
     self.delay = int(delay)
     self.done = False
     self.http = Registry().get('http')
     self.bad_words = file_to_list(Registry().get('wr_path') + "/bases/bad-words.txt")
示例#36
0
    def __init__(self, host, user, password, basename):
        """Open the DB connection and read deadlock-restart tuning from config."""
        self._host = host
        self._user = user
        self._password = password
        self._basename = basename

        self.connect()

        main_conf = Registry().get('config')['main']
        self._restart_by_deadlock_limit = int(main_conf['restarts_by_deadlock_limit'])
        self._sleep_by_deadlock_restart = int(main_conf['sleep_by_deadlock_restart'])
示例#37
0
    def _get_codes_stat(self):
        """ Build dict with http-codes and their counts """
        coll = Registry().get('mongo').spider_urls

        result = {}
        # One group row per distinct http code seen by the spider
        for row in coll.group({'code': True}, '', {}, 'function () {}'):
            code = row['code']
            found = coll.find({'code': code}, {'path': 1, 'query': 1})
            result[int(code)] = [
                link['path'] + '?' + link['query'] if link['query'] else link['path']
                for link in mongo_result_to_list(found)
            ]

        return result
示例#38
0
    def __init__(
            self, queue, protocol, host, url, max_params_length, value, method, mask_symbol, not_found_re,
            not_found_size, not_found_codes, retest_codes, delay, ignore_words_re,
            counter, result):
        """ Keep params-brute options, compile filters and prepare http/logger objects """
        super(ParamsBruterThread, self).__init__()
        self.retested_words = {}
        self.done = False

        self.queue = queue
        self.counter = counter
        self.result = result

        self.protocol = protocol.lower()
        self.host = host
        self.url = url
        self.method = method.lower()
        self.mask_symbol = mask_symbol
        self.value = value
        self.max_params_length = int(max_params_length)
        self.not_found_size = int(not_found_size)
        self.delay = int(delay)

        # Pre-compiled optional filters; False means "filter disabled"
        if len(ignore_words_re):
            self.ignore_words_re = re.compile(ignore_words_re)
        else:
            self.ignore_words_re = False
        if len(not_found_re):
            self.not_found_re = re.compile(not_found_re)
        else:
            self.not_found_re = False

        # 404 is always treated as "not found"
        codes = not_found_codes.split(',')
        codes.append('404')
        self.not_found_codes = list(set(codes))
        if len(retest_codes):
            self.retest_codes = list(set(retest_codes.split(',')))
        else:
            self.retest_codes = []

        config = Registry().get('config')
        self.retest_delay = int(config['params_bruter']['retest_delay'])
        self.retest_limit = int(config['dafs']['retest_limit'])

        # Per-thread http client copy so proxy switches don't affect other threads
        self.http = copy.deepcopy(Registry().get('http'))
        self.logger = Registry().get('logger')
示例#39
0
    def __init__(
            self, queue, protocol, host, template, method, mask_symbol, not_found_re,
            not_found_size, not_found_codes, retest_codes, delay, ignore_words_re,
            counter, result):
        """ Keep dafs-brute options, compile filters and prepare http/logger objects """
        threading.Thread.__init__(self)
        self.retested_words = {}
        self.done = False

        self.queue = queue
        self.counter = counter
        self.result = result

        self.protocol = protocol.lower()
        self.host = host
        self.template = template
        self.mask_symbol = mask_symbol
        self.not_found_size = int(not_found_size)
        self.delay = int(delay)

        # Pre-compiled optional filters; False means "filter disabled"
        if len(ignore_words_re):
            self.ignore_words_re = re.compile(ignore_words_re)
        else:
            self.ignore_words_re = False
        if len(not_found_re):
            self.not_found_re = re.compile(not_found_re)
        else:
            self.not_found_re = False

        # HEAD has no body, so body/size based checks force GET
        if method == 'head' and (len(not_found_re) or self.not_found_size != -1):
            self.method = 'get'
        else:
            self.method = method

        # 404 is always treated as "not found"
        codes = not_found_codes.split(',')
        codes.append('404')
        self.not_found_codes = list(set(codes))
        if len(retest_codes):
            self.retest_codes = list(set(retest_codes.split(',')))
        else:
            self.retest_codes = []

        dafs_config = Registry().get('config')['dafs']
        self.retest_delay = int(dafs_config['retest_delay'])
        self.retest_limit = int(dafs_config['retest_limit'])

        # Per-thread http client copy so proxy switches don't affect other threads
        self.http = copy.deepcopy(Registry().get('http'))
        self.logger = Registry().get('logger')
示例#40
0
 def __init__(
         self, queue, protocol, host, url, false_phrase, true_phrase, retest_codes, delay,
         confstr, first_stop, login, pass_min_len, pass_max_len, pass_found, counter, result
 ):
     """ Save form-bruter options and build an isolated per-thread http client """
     threading.Thread.__init__(self)
     self.retested_words = {}
     self.done = False

     self.queue = queue
     self.counter = counter
     self.result = result

     self.protocol = protocol.lower()
     self.host = host
     self.url = url
     self.false_phrase = false_phrase
     self.true_phrase = true_phrase
     self.confstr = confstr
     self.first_stop = first_stop
     self.login = login
     self.pass_found = pass_found
     self.pass_min_len = int(pass_min_len)
     self.pass_max_len = int(pass_max_len)
     self.delay = int(delay)

     if len(retest_codes):
         self.retest_codes = list(set(retest_codes.split(',')))
     else:
         self.retest_codes = []

     self.logger = Registry().get('logger')
     # Each auth attempt must be independent from the previous one
     self.http = copy.deepcopy(Registry().get('http'))
     self.http.every_request_new_session = True

     fb_config = Registry().get('config')['form_bruter']
     self.retest_delay = int(fb_config['retest_delay'])
     self.retest_limit = int(fb_config['retest_limit'])
示例#41
0
    def __init__(
            self, queue, protocol, host, template, method, mask_symbol, not_found_re,
            delay, ddos_phrase, ddos_human, recreate_re, ignore_words_re, counter, result
    ):
        """ Store selenium-dafs options, register the proxy-check url and start a browser """
        super(SDafsThread, self).__init__()
        self.done = False
        self.queue = queue
        self.counter = counter
        self.result = result

        self.protocol = protocol.lower()
        self.host = host
        self.template = template
        self.mask_symbol = mask_symbol
        self.delay = int(delay)
        self.ddos_phrase = ddos_phrase
        self.ddos_human = ddos_human

        # HEAD gives no page body, so a body check forces GET
        if len(not_found_re) and method.lower() == 'head':
            self.method = 'get'
        else:
            self.method = method

        # Pre-compiled optional filters; False means "filter disabled"
        if len(not_found_re):
            self.not_found_re = re.compile(not_found_re)
        else:
            self.not_found_re = False
        if len(recreate_re):
            self.recreate_re = re.compile(recreate_re)
        else:
            self.recreate_re = False
        if len(ignore_words_re):
            self.ignore_words_re = re.compile(ignore_words_re)
        else:
            self.ignore_words_re = False

        self.http = Registry().get('http')

        Registry().set('url_for_proxy_check', "{0}://{1}".format(protocol, host))

        self.browser_create()

        self.logger = Registry().get('logger')
示例#42
0
    def __init__(
            self, queue, protocol, host, url, false_phrase, true_phrase, delay, ddos_phrase, ddos_human, recreate_phrase,
            conffile, first_stop, login, #reload_form_page,
            pass_found, counter, result
    ):
        """ Save selenium form-bruter options and start a browser instance """
        super(SFormBruterThread, self).__init__()
        self.retested_words = {}

        self.queue = queue
        self.protocol = protocol.lower()
        self.host = host
        self.url = url

        self.delay = int(delay)
        self.ddos_phrase = ddos_phrase
        self.ddos_human = ddos_human
        self.recreate_phrase = recreate_phrase

        self.conffile = conffile
        self.false_phrase = false_phrase
        self.true_phrase = true_phrase
        self.first_stop = first_stop
        self.login = login
        self.pass_found = pass_found

        self.logger = Registry().get('logger')

        self.browser_create()

        self.counter = counter
        self.result = result
        self.done = False

        Registry().set('url_for_proxy_check', "{0}://{1}".format(protocol, host))
示例#43
0
    def __init__(
            self, queue, protocol, host, template, mask_symbol,
            false_phrase, retest_codes, delay, ignore_words_re, counter, result):
        """ Keep vhost-brute parameters, compile filters and prepare a http client """
        threading.Thread.__init__(self)
        self.retested_words = {}
        self.done = False

        self.queue = queue
        self.counter = counter
        self.result = result

        self.protocol = protocol.lower()
        self.host = host
        self.template = template
        self.mask_symbol = mask_symbol
        self.false_phrase = false_phrase
        # Virtual hosts are always probed with GET
        self.method = 'get'
        self.delay = int(delay)

        if len(retest_codes):
            self.retest_codes = list(set(retest_codes.split(',')))
        else:
            self.retest_codes = []
        if len(ignore_words_re):
            self.ignore_words_re = re.compile(ignore_words_re)
        else:
            self.ignore_words_re = False

        hb_config = Registry().get('config')['hosts_brute']
        self.retest_delay = int(hb_config['retest_delay'])
        self.retest_limit = int(hb_config['retest_limit'])

        # Per-thread http client copy so proxy switches don't affect other threads
        self.http = copy.deepcopy(Registry().get('http'))
        self.logger = Registry().get('logger')
示例#44
0
    def __init__(self, queue, domain, url, protocol, method, not_found_re, not_found_codes, delay, counter, result):
        """ Save scan params; downgrade HEAD to GET when a body check is required """
        threading.Thread.__init__(self)
        self.done = False
        self.queue = queue
        self.counter = counter
        self.result = result
        self.domain = domain
        self.url = url
        self.protocol = protocol
        self.delay = int(delay)

        # HEAD has no body, so a body-regexp check forces GET
        if len(not_found_re) and method.lower() == 'head':
            self.method = 'get'
        else:
            self.method = method

        if len(not_found_re):
            self.not_found_re = re.compile(not_found_re)
        else:
            self.not_found_re = False

        # 404 is always treated as "not found"
        codes = not_found_codes.split(',')
        codes.append('404')
        self.not_found_codes = list(set(codes))

        self.http = Registry().get('http')
        self.logger = Registry().get('logger')
示例#45
0
 def __init__(self, queue, domain, protocol, method, delay, counter, result):
     """ Store thread params; prepare http client and the bad-words list """
     threading.Thread.__init__(self)
     registry = Registry()
     self.queue = queue
     self.method = method.lower()
     self.domain = domain
     self.result = result
     self.counter = counter
     self.protocol = protocol
     self.delay = int(delay)
     self.done = False
     self.http = registry.get('http')
     # Error markers to look for in response bodies
     self.bad_words = file_to_list(registry.get('wr_path') + "/bases/bad-words.txt")
示例#46
0
    def _get_extensions(self):
        """ Build files extensions list """
        coll = Registry().get('mongo').spider_urls
        rows = mongo_result_to_list(coll.group({'path': True}, '', {}, 'function () {}'))

        # Collect distinct short extensions (dot + up to 4 chars) from all spidered paths
        exts = set()
        for row in rows:
            path = row['path']
            dot_pos = path.rfind('.')
            if dot_pos > -1 and len(path) - dot_pos <= 5:
                exts.add(path[dot_pos:])

        result = {}
        for ext in exts:
            # Leading backslash escapes the dot in the extension for the regexp
            found = mongo_result_to_list(coll.find({'path': re.compile('\\' + ext + '$')}))
            result[ext] = [
                link['path'] + '?' + link['query'] if link['query'] else link['path']
                for link in found
            ]

        return result
示例#47
0
 def __init__(self, queue, domains, template, proto, msymbol, ignore_ip, dns_srv, delay, http_nf_re, ignore_words_re, result, counter):
     """ Store dns-brute thread params and compile the optional filters """
     threading.Thread.__init__(self)
     self.done = False
     self.queue = queue
     self.counter = counter
     self.result = result
     self.domains = domains
     self.proto = proto
     self.dns_srv = dns_srv
     self.msymbol = msymbol
     self.template = template
     self.ignore_ip = ignore_ip
     self.delay = int(delay)
     self.logger = Registry().get('logger')
     # Optional filters: http "not found" page regexp and ignored words regexp
     if len(http_nf_re):
         self.http_nf_re = re.compile(http_nf_re)
     else:
         self.http_nf_re = None
     if len(ignore_words_re):
         self.ignore_words_re = re.compile(ignore_words_re)
     else:
         self.ignore_words_re = False
示例#48
0
    def setup_class(self):
        """ Prepare class for run tests """
        CURPATH = os.path.dirname(__file__) + "/"

        config = configparser.ConfigParser()
        config.read(CURPATH + 'config.ini')

        registry = Registry()
        registry.set('config', config)
        registry.set('logger', LoggerMock())
        registry.set('db', Database(
            config['main']['mysql_host'],
            config['main']['mysql_user'],
            config['main']['mysql_pass'],
            config['main']['mysql_dbname'],
        ))

        self.db = registry.get('db')  # type: Database
示例#49
0
    def __init__(
            self, queue, domain, protocol, method, not_found_re,
            delay, ddos_phrase, ddos_human, recreate_re, counter, result
    ):
        """ Keep selenium backups-finder params and spawn the browser """
        super(SBackupsFinderThread, self).__init__()
        self.done = False
        self.queue = queue
        self.counter = counter
        self.result = result
        self.domain = domain
        self.protocol = protocol
        self.delay = int(delay)
        self.ddos_phrase = ddos_phrase
        self.ddos_human = ddos_human

        # A page-body check makes HEAD pointless - switch to GET
        if len(not_found_re) and method.lower() == 'head':
            self.method = 'get'
        else:
            self.method = method

        # Pre-compiled optional filters; False means "filter disabled"
        if len(not_found_re):
            self.not_found_re = re.compile(not_found_re)
        else:
            self.not_found_re = False
        if len(recreate_re):
            self.recreate_re = re.compile(recreate_re)
        else:
            self.recreate_re = False

        self.http = Registry().get('http')
        self.logger = Registry().get('logger')

        Registry().set('url_for_proxy_check', "{0}://{1}".format(protocol, domain))

        self.browser_create()
示例#50
0
class SBackupsFinderThread(SeleniumThread):
    """ Thread class for BF module (selenium) """
    queue = None        # job queue with backup names/paths to try
    method = None       # http method ('head' is downgraded to 'get' when a body check is needed)
    url = None
    counter = None      # shared progress counter
    last_action = 0     # unix time of last activity

    def __init__(
            self, queue, domain, protocol, method, not_found_re,
            delay, ddos_phrase, ddos_human, recreate_re, counter, result
    ):
        """
        Store scan parameters, register the proxy-check url and start a browser.
        :param not_found_re: regexp marking a "not found" page; '' disables the check
        :param recreate_re: regexp of a page that forces browser re-creation
        """
        super(SBackupsFinderThread, self).__init__()
        self.queue = queue
        # HEAD returns no body, so a body-regexp check requires GET
        self.method = method if not (len(not_found_re) and method.lower() == 'head') else 'get'
        self.domain = domain
        self.result = result
        self.counter = counter
        self.protocol = protocol
        # False means "filter disabled"
        self.not_found_re = False if not len(not_found_re) else re.compile(not_found_re)
        self.done = False
        self.http = Registry().get('http')
        self.delay = int(delay)
        self.ddos_phrase = ddos_phrase
        self.ddos_human = ddos_human
        self.recreate_re = False if not len(recreate_re) else re.compile(recreate_re)

        self.logger = Registry().get('logger')

        Registry().set('url_for_proxy_check', "{0}://{1}".format(protocol, domain))

        self.browser_create()

    def run(self):
        """ Run thread: fetch words from the queue and check each candidate url in the browser """
        while not self.done:
            self.last_action = int(time.time())

            if self.delay:
                time.sleep(self.delay)

            try:
                word = self.queue.get()
                self.counter.up()

                url = "{0}://{1}{2}".format(self.protocol, self.domain, word)

                self.browser.get(url)

                # Page matches the "recreate" pattern (e.g. anti-ddos screen) -
                # restart the browser and move on
                if self.recreate_re and self.recreate_re.findall(self.browser.page_source):
                    #self.queue.task_done(word)
                    #self.queue.put(word)
                    self.browser_close()
                    self.browser_create()
                    continue

                # NOTE(review): assumes not_found_re was configured; if '' was passed this
                # raises on False.findall and is swallowed by the BaseException handler - confirm
                positive_item = False
                if not self.not_found_re.findall(self.browser.page_source):
                    self.result.append(word)
                    positive_item = True

                self.logger.item(word, self.browser.page_source, True, positive=positive_item)

                # Too many positive hits - signal the scan to stop (likely false positives)
                if len(self.result) >= int(Registry().get('config')['main']['positive_limit_stop']):
                    Registry().set('positive_limit_stop', True)

                #self.queue.task_done(word)
            except Queue.Empty:
                self.done = True
                break
            except TimeoutException as e:
                # Page load timeout: requeue the word and restart the browser
                self.queue.put(word)
                self.browser_close()
                self.browser_create()
                continue
            except BaseException as e:
                #self.queue.task_done(word)
                if not str(e).count('Timed out waiting for page load'):
                    self.logger.ex(e)
                if str(e).count("Connection refused"):
                    # Browser process died - requeue the word and restart it
                    self.queue.put(word)
                    self.browser_close()
                    self.browser_create()
            self.up_requests_count()

        self.browser_close()
示例#51
0
class BackupsFinderThread(HttpThread):
    """ Thread class for BF (backups finder) module """
    queue = None        # job queue with backup names/paths to try
    method = None       # normalized (lowercase) http method used for probing
    url = None
    counter = None      # shared progress counter
    last_action = 0     # unix time of last activity

    def __init__(self, queue, domain, protocol, method, not_found_re, not_found_codes,
                 not_found_size, delay, counter, result):
        """
        Store scan parameters.
        :param not_found_re: regexp marking a "not found" page; '' disables the check
        :param not_found_codes: comma-separated http codes meaning "not found" ('404' is always added)
        :param not_found_size: exact response size meaning "not found"; -1 disables the check
        """
        super(BackupsFinderThread, self).__init__()
        self.queue = queue
        self.domain = domain
        self.result = result
        self.counter = counter
        self.protocol = protocol
        self.not_found_re = False if not len(not_found_re) else re.compile(not_found_re)
        self.not_found_size = int(not_found_size)

        # Normalize the method once; HEAD has no body, so body/size checks force GET.
        # (Fix: the old code re-assigned the raw, non-lowercased `method` here, which
        # broke getattr(self.http, self.method) in run() for uppercase input.)
        self.method = method.lower()
        if (len(not_found_re) or self.not_found_size != -1) and self.method == 'head':
            self.method = 'get'

        # 404 is always treated as "not found"
        not_found_codes = not_found_codes.split(',')
        not_found_codes.append('404')
        self.not_found_codes = list(set(not_found_codes))

        self.delay = int(delay)

        self.done = False
        self.http = Registry().get('http')
        self.logger = Registry().get('logger')

    def run(self):
        """ Run thread: probe urls from the queue until it is empty """
        # Resolve the request function (http.get/http.head/...) once
        req_func = getattr(self.http, self.method)
        need_retest = False
        word = None

        while not self.done:
            self.last_action = int(time.time())

            if self.delay:
                time.sleep(self.delay)
            try:
                if not need_retest:
                    word = self.queue.get()
                    self.counter.up()

                url = "{0}://{1}{2}".format(self.protocol, self.domain, word)

                try:
                    resp = req_func(url)
                except ConnectionError:
                    # Proxy/network failure - switch proxy and retry the same word
                    need_retest = True
                    self.http.change_proxy()
                    continue

                positive_item = False
                if self.is_response_right(resp):
                    self.result.append(word)
                    positive_item = True

                self.log_item(word, resp, positive_item)

                self.check_positive_limit_stop(self.result)

                need_retest = False
            except Queue.Empty:
                self.done = True
                break
            except ChunkedEncodingError as e:
                self.logger.ex(e)
            except BaseException as e:
                self.logger.ex(e)
示例#52
0
class HostsBruteThread(HttpThread):
    """ Thread class for HostsBrute modules """
    queue = None            # job queue with words for virtual-host names
    method = None           # http method, always 'get' for this module
    url = None
    mask_symbol = None      # placeholder in the template replaced by each word
    counter = None          # shared progress counter
    retested_words = None   # word -> retest attempts counter (used by is_retest_need)
    last_action = 0         # unix time of last activity

    def __init__(
            self, queue, protocol, host, template, mask_symbol,
            false_phrase, retest_codes, delay, ignore_words_re, counter, result):
        """
        Store vhost-brute parameters.
        :param template: hostname template containing mask_symbol
        :param false_phrase: phrase marking a negative (non-existing vhost) response
        :param retest_codes: comma-separated http codes that trigger a delayed retest
        """
        # NOTE(review): calls threading.Thread.__init__ directly, skipping HttpThread's
        # own __init__ - confirm this is intentional
        threading.Thread.__init__(self)
        self.retested_words = {}

        self.queue = queue
        self.protocol = protocol.lower()
        self.host = host
        self.template = template
        self.mask_symbol = mask_symbol
        self.counter = counter
        self.result = result
        self.done = False

        self.false_phrase = false_phrase
        self.retest_codes = list(set(retest_codes.split(','))) if len(retest_codes) else []

        self.delay = int(delay)
        self.retest_delay = int(Registry().get('config')['hosts_brute']['retest_delay'])

        # Per-thread http client copy so proxy switches don't affect other threads
        self.http = copy.deepcopy(Registry().get('http'))
        self.logger = Registry().get('logger')

        self.method = 'get'

        # False means "filter disabled"
        self.ignore_words_re = False if not len(ignore_words_re) else re.compile(ignore_words_re)

        self.retest_limit = int(Registry().get('config')['hosts_brute']['retest_limit'])

    def run(self):
        """ Run thread: try each word as a Host header against the target until queue is empty """
        req_func = getattr(self.http, self.method)
        need_retest = False
        word = False

        while not self.done:
            self.last_action = int(time.time())

            if self.delay:
                time.sleep(self.delay)

            try:
                # On retest the previous word is reused instead of pulling a new one
                if not need_retest:
                    word = self.queue.get()
                    self.counter.up()

                # Skip blank words and words matched by the ignore filter
                if not len(word.strip()) or (self.ignore_words_re and self.ignore_words_re.findall(word)):
                    continue

                try:
                    hostname = self.template.replace(self.mask_symbol, word)
                except UnicodeDecodeError:
                    self.logger.log(
                        "URL build error (UnicodeDecodeError) with word '{0}', skip it".format(pprint.pformat(word)),
                        _print=False
                    )
                    continue

                try:
                    # Request goes to the fixed host/ip, candidate vhost goes into the Host header
                    resp = req_func(self.protocol + "://" + self.host, headers={'host': hostname})
                except ConnectionError:
                    need_retest = True
                    self.http.change_proxy()
                    continue

                # Response code is in retest_codes - wait and retry the same word
                if self.is_retest_need(word, resp):
                    time.sleep(self.retest_delay)
                    need_retest = True
                    continue

                # Search the false-phrase in headers and body together
                search_scope = ""
                for header in resp.headers:
                    search_scope += "{0}: {1}\r\n".format(header.title(), resp.headers[header])
                search_scope += '\r\n\r\n' + resp.text

                positive_item = False
                if resp is not None and not search_scope.count(self.false_phrase):
                    self.result.append(hostname)
                    positive_item = True

                self.log_item(word, resp, positive_item)

                self.check_positive_limit_stop(self.result)

                need_retest = False
            except Queue.Empty:
                self.done = True
                break

            except ChunkedEncodingError as e:
                self.logger.ex(e)
            except BaseException as e:
                try:
                    # Proxy connect failures are retried silently, everything else is logged
                    if str(e).count('Cannot connect to proxy'):
                        need_retest = True
                    else:
                        self.logger.ex(e)
                except UnicodeDecodeError:
                    pass
                except UnboundLocalError:
                    self.logger.ex(e)

            finally:
                pass
示例#53
0
File: Common.py  Project: hack4sec/ws-cli
    def setup_class(self):
        """ Read config.ini, connect to MySQL and MongoDB, and fill the shared registry """
        config = configparser.ConfigParser()
        config.read('config.ini')

        try:
            db = mysql.connector.connect(
                host=config['db']['host'],
                user=config['db']['user'],
                password=config['db']['pass'],
                database=config['db']['database']
            )
            db.autocommit = True
        except mysql.connector.errors.ProgrammingError as e:
            print " ERROR: Can`t connect to MySQL server! ({0})".format(str(e))
            exit(0)

        try:
            mc = MongoClient(host=config['mongo']['host'], port=int(config['mongo']['port']))
            mongo_collection = getattr(mc, config['mongo']['collection'])
        except pymongo.errors.ConnectionFailure as e:
            print " ERROR: Can`t connect to MongoDB server! ({0})".format(str(e))
            exit(0)

        registry = Registry()
        registry.set('config', config)
        registry.set('db', db)
        registry.set('mongo', mongo_collection)
        registry.set('wr_path', wrpath)
        registry.set('data_path', wrpath + '/data/')
        registry.set('ndb', Database(
            config['db']['host'], config['db']['user'], config['db']['pass'], config['db']['database']))
        registry.set('proxies', Proxies())

        self.db = registry.get('ndb')
示例#54
0
class CommonTest(object):
    """ Common class tests """
    db = None  # Database wrapper shared by all tests

    def setup_class(self):
        """ Read config, connect to MySQL/MongoDB and fill the shared registry """
        # Reset the proxies list before the test run
        open(wrpath + '/proxies.list', 'w').close()

        config = configparser.ConfigParser()
        config.read(os.getcwd() + '/' + 'config.ini')

        try:
            db = mysql.connector.connect(
                host=config['db']['host'],
                user=config['db']['user'],
                password=config['db']['pass'],
                database=config['db']['database']
            )
            db.autocommit = True
        except mysql.connector.errors.ProgrammingError as e:
            print " ERROR: Can`t connect to MySQL server! ({0})".format(str(e))
            exit(0)

        try:
            mc = MongoClient(host=config['mongo']['host'], port=int(config['mongo']['port']))
            mongo_collection = getattr(mc, config['mongo']['collection'])
        except pymongo.errors.ConnectionFailure as e:
            print " ERROR: Can`t connect to MongoDB server! ({0})".format(str(e))
            exit(0)

        R = Registry()
        R.set('config', config)
        R.set('db', db)
        R.set('mongo', mongo_collection)
        R.set('wr_path', wrpath)
        R.set('data_path', wrpath + '/data/')
        R.set('ndb',
              Database(config['db']['host'], config['db']['user'], config['db']['pass'], config['db']['database']))

        self.db = Registry().get('ndb')

    def setup(self):
        """ Per-test setup: start from an empty database """
        self.clear_db()

    def clear_db(self):
        """ Clearing db """
        self.db.q("TRUNCATE TABLE `hosts`")
        self.db.q("TRUNCATE TABLE `ips`")
        self.db.q("TRUNCATE TABLE `projects`")
        self.db.q("TRUNCATE TABLE `urls`")
        self.db.q("TRUNCATE TABLE `requests`")
        self.db.q("TRUNCATE TABLE `hosts_info`")
        self.db.q("TRUNCATE TABLE `urls_base`")
        self.db.q("TRUNCATE TABLE `urls_base_params`")
        self.db.q("TRUNCATE TABLE `cms`")
        self.db.q("TRUNCATE TABLE `cms_paths`")
        self.db.q("TRUNCATE TABLE `cms_paths_hashes`")

    def output_errors(self, out):
        """
        Check output for errors
        :param out: output
        :return:
        """
        assert out.find("Traceback") == -1
        assert "killed by time" not in out

    def _replace_config(self, name):
        """
        Change work config
        :param name: config name (*.ini)
        :return:
        """
        # Keep a backup of the original config to restore after the test
        shutil.copyfile("{0}/config.ini".format(wrpath), "{0}/config.ini.bak".format(wrpath))
        shutil.copyfile("{0}/configs/{1}.ini".format(testpath, name), "{0}/config.ini".format(wrpath))

    def _restore_config(self):
        """ Restore old original config """
        shutil.move("{0}/config.ini.bak".format(wrpath), "{0}/config.ini".format(wrpath))

    def _run(self, config_name, run_params):
        """
        Run WS
        :param config_name: name of config for test
        :param run_params: params for run WS-process
        :return: output of WS
        """
        self._replace_config(config_name)
        os.chdir(wrpath)
        out = subprocess.check_output(run_params)
        self._restore_config()
        self.output_errors(out)

        return out
示例#55
0
class FuzzerUrlsThread(threading.Thread):
    """ Thread class for FuzzerUrls module """
    queue = None        # job queue with urls to fuzz
    method = None       # lowercased http method name
    url = None
    counter = None      # shared progress counter
    last_action = 0     # unix time of last activity

    def __init__(self, queue, domain, protocol, method, delay, counter, result):
        """ Store fuzzer params and load the bad-words base """
        threading.Thread.__init__(self)
        self.queue = queue
        self.method = method.lower()
        self.domain = domain
        self.result = result
        self.counter = counter
        self.protocol = protocol
        self.done = False
        # Error markers to search for in response bodies
        self.bad_words = file_to_list(Registry().get('wr_path') + "/bases/bad-words.txt")
        self.http = Registry().get('http')
        self.delay = int(delay)

    def run(self):
        """ Run thread: request urls from the queue, collect 5xx codes and bad-word hits """
        # Resolve the request function (http.get/http.post/...) once
        req_func = getattr(self.http, self.method)
        need_retest = False

        while True:
            self.last_action = int(time.time())

            if self.delay:
                time.sleep(self.delay)
            try:
                # On retest the previous url is reused instead of pulling a new one
                if not need_retest:
                    url = self.queue.get()

                try:
                    resp = req_func(
                        "{0}://{1}{2}".format(self.protocol, self.domain, url)
                    )
                except ConnectionError:
                    # Proxy/network failure - change proxy and retry the same url
                    need_retest = True
                    self.http.change_proxy()
                    continue

                if resp is None:
                    continue

                # Any 5xx response is a positive result by itself
                if resp.status_code > 499 and resp.status_code < 600:
                    self.result.append({"url": url, "words": ["{0} Status code".format(resp.status_code)]})
                    continue

                found_words = []
                for bad_word in self.bad_words:
                    if resp.content.count(bad_word):
                        found_words.append(bad_word)

                if len(found_words):
                    self.result.append({"url": url, "words": found_words})

                self.counter.up()

                need_retest = False
            except Queue.Empty:
                self.done = True
                break
            except BaseException as e:
                # Python 2 print statement; NOTE(review): url may be unbound if get() raised - confirm
                print url + " " + str(e)
示例#56
0
File: WSBase.py  Project: Sts0mrg0/ws-cli
    def __init__(self):
        """
        Read config.ini, connect to MySQL and MongoDB and fill the global Registry
        with shared objects (config, db handles, http client, proxies, paths).
        Exits the process if any database connection fails.
        """
        config = configparser.ConfigParser()
        config.read(os.getcwd() + '/' + 'config.ini')

        try:
            db = mysql.connector.connect(
                host=config['db']['host'],
                user=config['db']['user'],
                password=config['db']['pass'],
                database=config['db']['database']
            )
            db.autocommit = True
        except mysql.connector.errors.ProgrammingError as e:
            print " ERROR: Can`t connect to MySQL server! ({0})".format(str(e))
            exit(0)

        try:
            mc = MongoClient(host=config['mongo']['host'], port=int(config['mongo']['port']))
            mongo_collection = getattr(mc, config['mongo']['collection'])
        except pymongo.errors.ConnectionFailure as e:
            print " ERROR: Can`t connect to MongoDB server! ({0})".format(str(e))
            exit(0)

        R = Registry()
        R.set('config', config)
        R.set('db', db)
        R.set('mongo', mongo_collection)
        R.set('wr_path', os.getcwd())
        R.set('data_path', os.getcwd() + '/data/')
        R.set('http', Http())
        R.set('proxies', Proxies())
        R.set(
            'ndb',
            Database(config['db']['host'], config['db']['user'], config['db']['pass'], config['db']['database'])
        )
        # Evil payload value used by fuzzer modules
        R.set(
            'fuzzer_evil_value',
            file_get_contents(Registry().get('wr_path') + "/bases/fuzzer-evil-value.txt").strip()
        )
        R.set('proxy_many_died', False)
        R.set('positive_limit_stop', False)

        # Selenium modules may need a headless virtual display
        if " ".join(sys.argv).count('selenium') and int(config['selenium']['virtual_display']):
            display = Display(visible=0, size=(800, 600))
            display.start()
            R.set('display', display)
示例#57
0
class MongoJob(WSJob):
    """ Common class for jobs works with MongoDB """
    unique = True
    collection = None
    select_limit = 50
    skip_blank_rows = True
    counter = 0
    collection_name = None

    def __init__(self, maxsize=0):
        WSJob.__init__(self, maxsize)
        self.collection = Registry().get('mongo')[self.collection_name]

    def build_row(self, _str):
        """ Common build row method for MongoDB """
        return {
            "name": _str.strip(),
            "checked": 0,
            "getted": 0
        }

    def qsize(self):
        """ Size of queue """
        return self.collection.find({"checked": 0}).count()

    def set_unique(self, unique=True):
        """ Enable remove dups in queue """
        self.unique = unique

    def set_skip_blank_rows(self, value=True):
        """ If True - we will skip blank rows then fill queue from dict or file """
        self.skip_blank_rows = value

    def task_done(self, name):
        """ Mark current row as done """
        self.counter += 1
        self.collection.update({'name': str(unicode(name)), "getted": 1}, {"$set": {"checked": 1}})
        WSJob.task_done(self)

    def get(self, block=False, timeout=None):
        """ Get next item from queue """
        if self.empty() or self.qsize() < 50:
            self.load_data()

        if self.empty():
            raise Queue.Empty

        return WSJob.get(self, block, timeout)

    def load_data(self):
        """ Load data into queue from MongoDB """
        data = self.collection.find(
            {"checked": 0, "getted": 0},
            limit=int(Registry().get('config')['main']['mongo_data_load_per_once'])
        )

        for row in data:
            self.put(row['name'])
            self.collection.update({"name": row['name']}, {"$set": {"getted": 1}})

        return True

    def load_dict(self, dict_for_load, drop=True):
        """ Fill collection from dict """
        if drop:
            self.collection.drop()

        counter = 0
        last = "START OF FILE"

        for line in dict_for_load:
            try:
                line = line.strip()
                unicode(line)
                self.collection.insert(self.build_row(line))
            except UnicodeDecodeError:
                _str = " UNICODE ERROR: In file '{0}' skip word '{1}', after word '{2}' !".format(file, line, last)
                if Registry().isset('logger'):
                    Registry().get('logger').log(_str)
                else:
                    print _str

                continue

            counter += 1
            last = line

        self.load_data()

        return counter

    def load_dom(self, dom):
        """ Fill queue from DictOfMask """
        self.collection.drop()
        while True:
            word = dom.get()
            if word is None:
                break
            self.collection.insert(self.build_row(word))
        self.collection.create_index('name', drop_dups=True, unique=self.unique)

        self.load_data()
        return self.collection.count()

    def load_file(self, _file):
        """ Fill queue from text file """
        self.collection.drop()

        fh = open(_file)

        last = "START OF FILE"
        while True:
            line = fh.readline()
            if not line:
                break
            if not line.strip() and self.skip_blank_rows:
                continue

            try:
                line = line.strip()
                unicode(line)
                self.collection.insert(self.build_row(line))
            except UnicodeDecodeError:
                _str = " UNICODE ERROR: In file '{0}' skip word '{1}', after word '{2}' !".format(_file, line, last)
                if Registry().isset('logger'):
                    Registry().get('logger').log(_str)
                else:
                    print _str
                continue

            last = line

        fh.close()

        self.collection.create_index('name', drop_dups=True, unique=self.unique)

        self.load_data()

        return self.collection.count()

    # 2 метода ниже взяты с
    # http://stackoverflow.com/questions/1581895/how-check-if-a-task-is-already-in-python-queue
    # Рецепт для уникальных задачь в очереди
    def _init(self, maxsize):
        WSJob._init(self, maxsize)
        if self.unique:
            self.all_items = set()

    def _put(self, item):
        if self.unique:
            if item not in self.all_items:
                WSJob._put(self, item)
                self.all_items.add(item)
            else:
                _str = "WARNING: try to add not unique item `{0}`".format(item)

                if Registry().isset('logger'):
                    #Registry().get('logger').log(_str)
                    pass
                else:
                    #print _str
                    pass
        else:
            WSJob._put(self, item)
示例#58
0
class SFormBruterThread(SeleniumThread):
    """
    Thread class for the FormBruter module (selenium-driven).

    Takes passwords from a shared queue, submits the login form described by
    the conf file (CSS selectors for ^USER^/^PASS^/^SUBMIT^), and records a
    hit when the false-phrase disappears or the true-phrase appears.
    """
    queue = None
    method = None
    url = None
    mask_symbol = None
    counter = None
    retested_words = None
    logger = None
    last_action = 0        # unix timestamp of the last loop iteration
    first_page_load = False

    def __init__(
            self, queue, protocol, host, url, false_phrase, true_phrase, delay, ddos_phrase, ddos_human, recreate_phrase,
            conffile, first_stop, login, #reload_form_page,
            pass_found, counter, result
    ):
        super(SFormBruterThread, self).__init__()
        self.retested_words = {}

        self.queue = queue
        self.protocol = protocol.lower()
        self.host = host
        self.url = url
        self.delay = int(delay)
        self.ddos_phrase = ddos_phrase
        self.ddos_human = ddos_human
        self.recreate_phrase = recreate_phrase
        self.conffile = conffile
        self.false_phrase = false_phrase
        self.true_phrase = true_phrase
        self.first_stop = first_stop
        self.login = login
        self.pass_found = pass_found
        self.logger = Registry().get('logger')
        #self.reload_form_page = int(reload_form_page)

        self.browser_create()

        self.counter = counter
        self.result = result
        self.done = False

        Registry().set('url_for_proxy_check', "{0}://{1}".format(protocol, host))

    def parse_brute_config(self, path):
        """
        Parse the brute conf file into a dict {point: css_selector}.

        Each non-blank line is "<point>    <selector>" separated by 4 spaces.
        NOTE(review): presence of the required ^USER^/^PASS^/^SUBMIT^ keys is
        not validated here — missing keys surface later as KeyError in run().
        """
        to_return = {}

        # BUGFIX: the file handle was previously never closed (leak).
        fh = open(path)
        try:
            for line in fh.readlines():
                if not len(line.strip()):
                    continue

                point, selector = line.strip().split("    ")
                to_return[point] = selector
        finally:
            fh.close()

        return to_return

    def run(self):
        """ Run thread """
        need_retest = False
        word = False

        brute_conf = self.parse_brute_config(self.conffile)

        while not self.pass_found and not self.done:
            try:
                self.last_action = int(time.time())

                if self.pass_found:
                    self.done = True
                    break

                if self.delay:
                    time.sleep(self.delay)

                # Only fetch a fresh word when the previous one succeeded;
                # on retest we resubmit the same word.
                if not need_retest:
                    word = self.queue.get()
                    self.counter.up()

                #if self.reload_form_page or \
                    #    (not self.browser.element_exists(By.CSS_SELECTOR, brute_conf['^USER^']) or
                    #     not self.browser.element_exists(By.CSS_SELECTOR, brute_conf['^PASS^'])) :
                    #self.browser.get(self.protocol + "://" + self.host + self.url)

                self.browser.get(self.protocol + "://" + self.host + self.url)

                # Server asked to recreate the session (e.g. ban page) — restart browser.
                if len(self.recreate_phrase) and self.browser.page_source.lower().count(self.recreate_phrase.lower()):
                    need_retest = True
                    self.browser_close()
                    self.browser_create()
                    continue

                self.browser.find_element(By.CSS_SELECTOR, brute_conf['^USER^']).clear()
                self.browser.find_element(By.CSS_SELECTOR, brute_conf['^USER^']).send_keys(self.login)
                self.browser.find_element(By.CSS_SELECTOR, brute_conf['^PASS^']).clear()
                self.browser.find_element(By.CSS_SELECTOR, brute_conf['^PASS^']).send_keys(word)
                self.browser.find_element(By.CSS_SELECTOR, brute_conf['^SUBMIT^']).click()
                time.sleep(1)

                self.logger.item(word, self.browser.page_source, True)

                # Hit: false-phrase gone, or true-phrase present.
                if ( (len(self.false_phrase) and not self.browser.page_source.count(self.false_phrase)) or
                         (len(self.true_phrase) and self.browser.page_source.count(self.true_phrase)) ):
                    self.result.append({'word': word, 'content': self.browser.page_source})
                    #self.logger.log("Result: {0}".format(word))

                    if len(self.result) >= int(Registry().get('config')['main']['positive_limit_stop']):
                        Registry().set('positive_limit_stop', True)

                    if int(self.first_stop):
                        self.done = True
                        self.pass_found = True
                        break
                    else:
                        # Otherwise the old session would persist and cause
                        # a pile of false-positives.
                        self.browser_close()
                        self.browser_create()
                need_retest = False
            except Queue.Empty:
                self.done = True
                break
            except TimeoutException as e:
                need_retest = True
                self.browser_close()
                self.browser_create()
                continue
            except UnicodeDecodeError as e:
                self.logger.ex(e)
                need_retest = False
            except BaseException as e:
                try:
                    need_retest = True
                    # errno 111 == connection refused: restart the browser.
                    if len(e.args) and e.args[0] == 111:
                        self.browser_close()
                        self.browser_create()
                    elif not str(e).count('Timed out waiting for page load'):
                        self.logger.ex(e)
                except UnicodeDecodeError:
                    need_retest = False
            self.up_requests_count()

        self.browser_close()
示例#59
0
 def __init__(self, maxsize=0):
     """ Initialize the base queue and bind the job's MongoDB collection. """
     WSJob.__init__(self, maxsize)
     mongo = Registry().get('mongo')
     self.collection = mongo[self.collection_name]