Example #1
    def setup(self):
        """
        Set up all the required backend stores. This was mostly created to
        avoid starting any threads during __init__(), which is called during
        Python's import phase and dead-locks in some cases.

        :return: None
        """
        with self._kb_lock:
            if self.initialized:
                return

            self.urls = DiskSet(table_prefix='kb_urls')
            self.fuzzable_requests = DiskSet(table_prefix='kb_fuzzable_requests')

            self.db = get_default_persistent_db_instance()

            self.table_name = 'knowledge_base_' + rand_alpha(30)
            self.db.create_table(self.table_name, self.COLUMNS)
            self.db.create_index(self.table_name, ['location_a', 'location_b'])
            self.db.create_index(self.table_name, ['uniq_id'])
            self.db.commit()

            # Only initialize once
            self.initialized = True
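The docstring above defers all thread and database setup out of __init__() because __init__() runs during Python's import phase, where starting threads can dead-lock. As a rough, hypothetical sketch of that lazy-initialization pattern (not w3af's actual code), the idea is a lock-guarded, idempotent setup():

import threading


class LazyStore(object):
    # Nothing expensive in __init__: importing the module stays cheap and
    # cannot dead-lock, because no threads or DB handles are created yet.
    def __init__(self):
        self._lock = threading.Lock()
        self.initialized = False
        self.backend = None

    def setup(self):
        # Lock-guarded and idempotent: safe to call from several threads,
        # and the expensive resources are only created once, on first use.
        with self._lock:
            if self.initialized:
                return
            self.backend = {}  # stand-in for the DiskSet / SQLite handles
            self.initialized = True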
Example #2
    def __init__(self):
        GrepPlugin.__init__(self)

        self._total_http_request_count = 0
        self._vuln_count = 0
        self._vuln_urls = DiskSet(table_prefix='click_jacking')
        self._vuln_ids = DiskSet(table_prefix='click_jacking')
Example #3
    def cleanup(self):
        """
        Cleanup internal data.
        """
        self.db.execute("DELETE FROM %s WHERE 1=1" % self.table_name)

        # Remove the old, create new.
        self.urls.cleanup()
        self.urls = DiskSet()

        self.fuzzable_requests.cleanup()
        self.fuzzable_requests = DiskSet()

        self.observers.clear()
Example #4
    def test_add_HTTPPostDataRequest(self):
        ds = DiskSet()

        uri = URL('http://w3af.org/?id=2')
        hdr = Headers([('Referer', 'http://w3af.org/')])

        pdr1 = HTTPPostDataRequest(uri, method='GET', headers=hdr)

        uri = URL('http://w3af.org/?id=3')
        pdr2 = HTTPPostDataRequest(uri, method='GET', headers=hdr)

        uri = URL('http://w3af.org/?id=7')
        pdr3 = HTTPPostDataRequest(uri, method='FOO', headers=hdr)

        ds.add(pdr1)
        ds.add(pdr2)
        ds.add(pdr2)
        ds.add(pdr1)

        self.assertEqual(ds[0], pdr1)
        self.assertEqual(ds[1], pdr2)
        self.assertFalse(pdr3 in ds)
        self.assertTrue(pdr2 in ds)
        self.assertEqual(len(ds), 2)

        # This forces an internal change in the URL object
        pdr2.get_url().url_string
        self.assertTrue(pdr2 in ds)
Example #5
    def test_add_QsRequest(self):
        ds = DiskSet()

        uri = URL('http://w3af.org/?id=2')
        hdr = Headers([('Referer', 'http://w3af.org/')])

        qsr1 = FuzzableRequest(uri, method='GET', headers=hdr)

        uri = URL('http://w3af.org/?id=3')
        qsr2 = FuzzableRequest(uri, method='GET', headers=hdr)

        uri = URL('http://w3af.org/?id=7')
        qsr3 = FuzzableRequest(uri, method='FOO', headers=hdr)

        ds.add(qsr1)
        ds.add(qsr2)
        ds.add(qsr2)
        ds.add(qsr1)

        self.assertEqual(ds[0], qsr1)
        self.assertEqual(ds[1], qsr2)
        self.assertFalse(qsr3 in ds)
        self.assertTrue(qsr2 in ds)
        self.assertEqual(len(ds), 2)

        # This forces an internal change in the URL object
        qsr2.get_url().url_string
        self.assertIn(qsr2, ds)
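Both tests above mutate the stored request's URL object internally (by touching url_string) and then assert that membership in the DiskSet still holds. That behaviour suggests lookups are keyed on the object's stable equality attributes rather than on its raw serialized bytes. The following is only a minimal sketch of that idea; TinyDiskSet and its schema are hypothetical, not DiskSet's verified internals:

import pickle
import sqlite3


class TinyDiskSet(object):
    # Sketch: rows are keyed on a hash of the item; the pickle is only payload.
    def __init__(self):
        self._conn = sqlite3.connect(':memory:')
        self._conn.execute('CREATE TABLE items (key TEXT UNIQUE, blob BLOB)')

    def add(self, item):
        # str(hash(item)) stays stable as long as __hash__ is defined over the
        # item's identity attributes (URI, method, ...), so later internal
        # caching inside the object does not change the lookup key.
        key = str(hash(item))
        self._conn.execute('INSERT OR IGNORE INTO items VALUES (?, ?)',
                           (key, sqlite3.Binary(pickle.dumps(item))))

    def __contains__(self, item):
        cur = self._conn.execute('SELECT 1 FROM items WHERE key = ?',
                                 (str(hash(item)),))
        return cur.fetchone() is not None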
Example #6
    def test_store_fuzzable_request_two(self):
        ds = DiskSet()

        # Add a simple fr, without post-data
        fr = FuzzableRequest(URL('http://example.com/?id=1'))
        ds.add(fr)

        # Add a fr with post-data
        form_params = FormParameters()
        form_params.add_field_by_attr_items([("name", "username"),
                                             ("value", "abc")])
        form_params.add_field_by_attr_items([("name", "address"),
                                             ("value", "")])
        form_params.set_action(URL('http://example.com/?id=1'))
        form_params.set_method('post')

        form = dc_from_form_params(form_params)

        fr = FuzzableRequest.from_form(form)
        ds.add(fr)

        # Compare
        stored_fr = ds[1]

        self.assertEqual(stored_fr, fr)
        self.assertIsNot(stored_fr, fr)
Example #7
    def __init__(self):
        GrepPlugin.__init__(self)

        self._analyzed_hashes = DiskSet(table_prefix='retirejs')
        self._retirejs_path = self._get_retirejs_path()
        self._retirejs_exit_code_result = None
        self._retirejs_exit_code_was_run = False
Example #8
    def test_store_in_disk_set(self):
        boundary, post_data = multipart_encode([
            ('a', 'bcd'),
        ], [])
        multipart_boundary = MultipartContainer.MULTIPART_HEADER

        headers = Headers([('content-length', str(len(post_data))),
                           ('content-type', multipart_boundary % boundary)])

        dc = MultipartContainer.from_postdata(headers, post_data)

        dc.set_token(('a', 0))

        disk_set = DiskSet()
        disk_set.add(dc)

        dc_read = disk_set[0]

        # These are different objects
        self.assertIsNot(dc_read, dc)

        # But they hold the same data
        self.assertEqual(dc.get_token(), dc_read.get_token())
        self.assertIsNotNone(dc.get_token())
        self.assertIsNotNone(dc_read.get_token())
        self.assertEqual(dc_read.get_token().get_name(), 'a')
Example #9
    def __init__(self):
        super(ParserCache, self).__init__()

        self._cache = SynchronizedLRUDict(self.CACHE_SIZE)
        self._can_parse_cache = SynchronizedLRUDict(self.CACHE_SIZE * 10)
        self._parser_finished_events = {}
        self._parser_blacklist = DiskSet()
Example #10
    def test_add(self):
        ds = DiskSet()
        ds.add(1)
        ds.add(2)
        ds.add(3)
        ds.add(1)

        self.assertEqual(list(ds), [1, 2, 3])
        self.assertEqual(len(ds), 3)
        self.assertEqual(unicode(ds), u'<DiskSet [1, 2, 3]>')
Example #11
    def test_disk_set(self):
        ds = DiskSet()

        for i in xrange(20000):
            data = (i, i)
            ds.add(data)

        for i in xrange(20000):
            data = (i, i)
            # Exercise __contains__ for every stored tuple; the membership
            # result is intentionally discarded.
            data in ds
Example #12
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._compiled_ignore_re = None
        self._compiled_follow_re = None
        self._broken_links = DiskSet(table_prefix='hidden_payment_gateway')
        self._first_run = True
        self._target_urls = []
        self._target_domain = None
Example #13
    def __init__(self):
        """
        CHANGELOG:
            Feb/17/2009- Added PHP Settings Audit Checks by Aung Khant
            (aungkhant[at]yehg.net)
        """
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = DiskSet(table_prefix='phpinfo')
        self._has_audited = 0
Example #14
    def test_add_urlobject(self):
        ds = DiskSet()

        ds.add(URL('http://w3af.org/?id=2'))
        ds.add(URL('http://w3af.org/?id=3'))
        ds.add(URL('http://w3af.org/?id=3'))

        self.assertEqual(ds[0], URL('http://w3af.org/?id=2'))
        self.assertEqual(ds[1], URL('http://w3af.org/?id=3'))
        self.assertEqual(len(ds), 2)
        self.assertFalse(URL('http://w3af.org/?id=4') in ds)
        self.assertTrue(URL('http://w3af.org/?id=2') in ds)
Example #15
    def test_table_name_with_prefix(self):
        _unittest = 'unittest'
        disk_set = DiskSet(_unittest)

        self.assertIn(_unittest, disk_set.table_name)
        db = get_default_temp_db_instance()

        self.assertTrue(db.table_exists(disk_set.table_name))

        disk_set.cleanup()

        self.assertFalse(db.table_exists(disk_set.table_name))
Example #16
File: open_api.py Project: yvonneKim/w3af
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._first_run = True
        self._already_analyzed = DiskSet(table_prefix='open_api')

        # User configured variables
        self._query_string_auth = ''
        self._header_auth = ''
        self._no_spec_validation = False
        self._custom_spec_location = ''
        self._discover_fuzzable_headers = True
Example #17
    def test_remove_table(self):
        disk_set = DiskSet()
        disk_set.add(1)
        disk_set.add(2)

        table_name = disk_set.table_name
        db = get_default_temp_db_instance()

        self.assertTrue(db.table_exists(table_name))

        disk_set.cleanup()

        self.assertFalse(db.table_exists(table_name))
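The two table tests (Examples #15 and #17) imply that every DiskSet is backed by its own SQLite table, that table_prefix only influences the generated table name, and that cleanup() removes the table entirely. Assuming cleanup() boils down to a DROP TABLE (an assumption, not verified against the DiskSet source), the observable behaviour matches this plain-sqlite3 sketch:

import sqlite3


def table_exists(conn, name):
    # sqlite_master lists every table in the database.
    row = conn.execute("SELECT name FROM sqlite_master "
                       "WHERE type='table' AND name=?", (name,)).fetchone()
    return row is not None


conn = sqlite3.connect(':memory:')
table_name = 'unittest_disk_set_example'   # hypothetical generated name

# The table exists right after creation ...
conn.execute('CREATE TABLE %s (item BLOB)' % table_name)
assert table_exists(conn, table_name)

# ... and is gone after the equivalent of cleanup().
conn.execute('DROP TABLE %s' % table_name)
assert not table_exists(conn, table_name)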
Example #18
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured parameters
        self._dir_list = os.path.join(self.BASE_PATH, 'common_dirs_small.db')
        self._file_list = os.path.join(self.BASE_PATH, 'common_files_small.db')

        self._bf_directories = True
        self._bf_files = False
        self._be_recursive = False

        # Internal variables
        self._exec = True
        self._already_tested = DiskSet(table_prefix='dir_file_bruter')
Example #19
    def __init__(self):
        super(DBKnowledgeBase, self).__init__()

        self.urls = DiskSet()
        self.fuzzable_requests = DiskSet()

        self.db = get_default_persistent_db_instance()

        columns = [('location_a', 'TEXT'), ('location_b', 'TEXT'),
                   ('uniq_id', 'TEXT'), ('pickle', 'BLOB')]

        self.table_name = rand_alpha(30)
        self.db.create_table(self.table_name, columns)
        self.db.create_index(self.table_name, ['location_a', 'location_b'])
        self.db.create_index(self.table_name, ['uniq_id'])
        self.db.commit()

        # TODO: Why doesn't this work with a WeakValueDictionary?
        self.observers = {}  #WeakValueDictionary()
        self.type_observers = {}  #WeakValueDictionary()
        self.url_observers = []
        self._observer_id = 0
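For orientation, the create_table() and create_index() calls above presumably amount to SQL along the following lines; the exact statements and index names are assumptions, shown here with plain sqlite3 and a fixed table name standing in for rand_alpha(30):

import sqlite3

conn = sqlite3.connect(':memory:')
table_name = 'knowledge_base_example'  # stand-in for the random table name

conn.execute('CREATE TABLE %s (location_a TEXT, location_b TEXT, '
             'uniq_id TEXT, pickle BLOB)' % table_name)
# One composite index for (location_a, location_b) lookups, one for uniq_id.
conn.execute('CREATE INDEX %s_location ON %s (location_a, location_b)'
             % (table_name, table_name))
conn.execute('CREATE INDEX %s_uniq_id ON %s (uniq_id)'
             % (table_name, table_name))
conn.commit()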
Example #20
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._compiled_ignore_re = None
        self._compiled_follow_re = None
        self._broken_links = DiskSet()
        self._first_run = True
        self._known_variants = VariantDB()
        self._already_filled_form = ScalableBloomFilter()

        # User configured variables
        self._ignore_regex = ''
        self._follow_regex = '.*'
        self._only_forward = False
        self._compile_re()
Example #21
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._compiled_ignore_re = None
        self._compiled_follow_re = None
        self._broken_links = DiskSet(table_prefix='web_spider')
        self._first_run = True
        self._target_urls = []
        self._target_domain = None
        self._already_filled_form = ScalableBloomFilter()
        self._variant_db = VariantDB()

        # User configured variables
        self._ignore_regex = ''
        self._follow_regex = '.*'
        self._only_forward = False
        self._compile_re()
Example #22
    def test_store_fuzzable_request(self):
        form_params = FormParameters()
        form_params.add_input([("name", "username"), ("value", "abc")])
        form_params.add_input([("name", "address"), ("value", "")])
        form_params.set_action(URL('http://example.com/?id=1'))
        form_params.set_method('post')

        form = dc_from_form_params(form_params)

        fr = FuzzableRequest.from_form(form)

        ds = DiskSet()
        ds.add(fr)

        stored_fr = ds[0]

        self.assertEqual(stored_fr, fr)
        self.assertIsNot(stored_fr, fr)
Example #23
    def test_multipart_fuzzable_request_store(self):
        boundary, post_data = multipart_encode([('a', 'bcd'), ], [])
        multipart_boundary = MultipartContainer.MULTIPART_HEADER

        headers = Headers([('content-length', str(len(post_data))),
                           ('content-type', multipart_boundary % boundary)])

        dc = MultipartContainer.from_postdata(headers, post_data)
        post_data = str(dc)

        fr = FuzzableRequest.from_parts(URL('http://www.w3af.com/'),
                                        method='POST', post_data=post_data,
                                        headers=headers)
        
        disk_set = DiskSet()
        disk_set.add(fr)

        fr_read = disk_set[0]

        self.assertIsInstance(fr_read.get_raw_data(), MultipartContainer)
        self.assertIn('a', fr_read.get_raw_data())
Example #24
    def test_thread_safe(self):
        ds = DiskSet()

        def worker(range_inst):
            for i in range_inst:
                ds.add(i)

        threads = []
        _min = 0
        add_dups = False
        for _max in xrange(0, 1100, 100):

            th = threading.Thread(target=worker, args=(xrange(_min, _max), ))
            threads.append(th)

            # For testing the uniqueness of DiskSets
            add_dups = not add_dups
            if add_dups:
                th = threading.Thread(target=worker,
                                      args=(xrange(_min, _max), ))
                threads.append(th)

            _min = _max

        for th in threads:
            th.start()

        for th in threads:
            th.join()

        for i in xrange(0, 1000):
            self.assertTrue(i in ds, i)

        ds_as_list = list(ds)
        self.assertEqual(len(ds_as_list), len(set(ds_as_list)))

        ds_as_list.sort()
        self.assertEqual(ds_as_list, range(1000))
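The test above hammers a single DiskSet with add() calls from many threads (half of them inserting duplicate ranges) and then checks that every value is present exactly once. A minimal sketch of how concurrent adds could be serialized with a single lock is shown below; this only illustrates the property being tested, and the real DiskSet may instead rely on SQLite's own locking:

import threading


class LockedSet(object):
    # Sketch: one lock serializes all access to the underlying set.
    def __init__(self):
        self._lock = threading.Lock()
        self._items = set()

    def add(self, item):
        with self._lock:
            self._items.add(item)

    def __contains__(self, item):
        with self._lock:
            return item in self._items

    def __len__(self):
        with self._lock:
            return len(self._items)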
Example #25
File: phpinfo.py Project: intfrr/Tortazo
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = DiskSet()
        self._has_audited = 0
Example #26
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = DiskSet(table_prefix='phpinfo')
        self._has_audited = False
Example #27
    def test_update(self):
        ds = DiskSet()
        ds.add(1)
        ds.update([2, 3, 1])

        self.assertEqual(list(ds), [1, 2, 3])
Example #28
    def __init__(self):
        GrepPlugin.__init__(self)

        self._error_500_responses = DiskSet(table_prefix='error_500')
Example #29
File: error_500.py Project: intfrr/Tortazo
    def __init__(self):
        GrepPlugin.__init__(self)

        self._error_500_responses = DiskSet()
Example #30
    def __init__(self):
        CrawlPlugin.__init__(self)

        self._captchas_found = DiskSet(table_prefix='find_captchas')