def cleanup(self):
    """
    Wipe all stored knowledge and reset the internal collections.

    Empties (but keeps) the backing database table, replaces the
    disk-backed sets with fresh ones, and drops all observers.
    """
    # Empty the kb table; the table itself is kept alive.
    self.db.execute("DELETE FROM %s WHERE 1=1" % self.table_name)

    # Dispose of the old disk-backed sets and start with fresh ones.
    for attr_name in ('urls', 'fuzzable_requests'):
        getattr(self, attr_name).cleanup()
        setattr(self, attr_name, DiskSet())

    self.observers.clear()
def test_add_HTTPPostDataRequest(self):
    """DiskSet must de-duplicate HTTPPostDataRequest instances."""
    disk_set = DiskSet()

    headers = Headers([('Referer', 'http://w3af.org/')])

    request_a = HTTPPostDataRequest(URL('http://w3af.org/?id=2'),
                                    method='GET', headers=headers)
    request_b = HTTPPostDataRequest(URL('http://w3af.org/?id=3'),
                                    method='GET', headers=headers)
    request_c = HTTPPostDataRequest(URL('http://w3af.org/?id=7'),
                                    method='FOO', headers=headers)

    # Duplicates must be silently ignored, whatever the insertion order.
    for request in (request_a, request_b, request_b, request_a):
        disk_set.add(request)

    self.assertEqual(disk_set[0], request_a)
    self.assertEqual(disk_set[1], request_b)
    self.assertFalse(request_c in disk_set)
    self.assertTrue(request_b in disk_set)
    self.assertEqual(len(disk_set), 2)

    # This forces an internal change in the URL object
    request_b.get_url().url_string
    self.assertTrue(request_b in disk_set)
def test_add(self):
    """Adding a duplicated item must leave the DiskSet unchanged."""
    disk_set = DiskSet()

    # The trailing 1 is a duplicate and must be dropped.
    for value in (1, 2, 3, 1):
        disk_set.add(value)

    self.assertEqual(list(disk_set), [1, 2, 3])
    self.assertEqual(len(disk_set), 3)
def test_add_urlobject(self):
    """URL objects must be stored uniquely and support membership tests."""
    disk_set = DiskSet()

    disk_set.add(URL('http://w3af.org/?id=2'))
    disk_set.add(URL('http://w3af.org/?id=3'))
    # Second copy of id=3 must be ignored.
    disk_set.add(URL('http://w3af.org/?id=3'))

    self.assertEqual(disk_set[0], URL('http://w3af.org/?id=2'))
    self.assertEqual(disk_set[1], URL('http://w3af.org/?id=3'))
    self.assertEqual(len(disk_set), 2)
    self.assertFalse(URL('http://w3af.org/?id=4') in disk_set)
    self.assertTrue(URL('http://w3af.org/?id=2') in disk_set)
def test_remove_table(self):
    """cleanup() must drop the DiskSet's backing database table."""
    disk_set = DiskSet()
    disk_set.add(1)
    disk_set.add(2)

    table = disk_set.table_name
    db = get_default_temp_db_instance()

    # The table exists while the DiskSet is alive...
    self.assertTrue(db.table_exists(table))

    disk_set.cleanup()

    # ...and is gone after cleanup().
    self.assertFalse(db.table_exists(table))
def __init__(self):
    """
    Create the knowledge base: disk-backed sets for URLs and fuzzable
    requests, plus a randomly-named SQLite table for pickled findings.
    """
    super(DBKnowledgeBase, self).__init__()

    self.urls = DiskSet()
    self.fuzzable_requests = DiskSet()

    self.db = get_default_persistent_db_instance()

    # Schema for the pickled knowledge store.
    schema = [('location_a', 'TEXT'),
              ('location_b', 'TEXT'),
              ('uniq_id', 'TEXT'),
              ('pickle', 'BLOB')]

    # Random table name avoids clashes with previous/parallel scans.
    self.table_name = rand_alpha(30)
    self.db.create_table(self.table_name, schema)
    self.db.create_index(self.table_name, ['location_a', 'location_b'])
    self.db.create_index(self.table_name, ['uniq_id'])
    self.db.commit()

    # TODO: Why doesn't this work with a WeakValueDictionary?
    self.observers = {}       # WeakValueDictionary()
    self.type_observers = {}  # WeakValueDictionary()
    self._observer_id = 0
def __init__(self):
    CrawlPlugin.__init__(self)

    # State kept while crawling.
    self._first_run = True
    self._broken_links = DiskSet()
    self._known_variants = VariantDB()
    self._already_filled_form = ScalableBloomFilter()
    self._compiled_ignore_re = None
    self._compiled_follow_re = None

    # Options the user may override.
    self._ignore_regex = ''
    self._follow_regex = '.*'
    self._only_forward = False

    # Pre-compile the ignore/follow regular expressions above.
    self._compile_re()
def __init__(self):
    CrawlPlugin.__init__(self)

    # User configured parameters: word lists shipped with the plugin.
    wordlist_dir = os.path.join('plugins', 'crawl', 'dir_file_bruter')
    self._dir_list = os.path.join(wordlist_dir, 'common_dirs_small.db')
    self._file_list = os.path.join(wordlist_dir, 'common_files_small.db')

    self._bf_directories = True
    self._bf_files = False
    self._be_recursive = False

    # Internal state.
    self._exec = True
    self._already_tested = DiskSet()
def test_thread_safe(self):
    """
    Hammer a single DiskSet from many threads and verify that no item is
    lost and no duplicate survives.
    """
    ds = DiskSet()

    def worker(range_inst):
        # Each worker adds every number in its range to the shared set.
        for i in range_inst:
            ds.add(i)

    threads = []
    _min = 0
    add_dups = False
    # One worker per 100-item slice of [0, 1000); note the first xrange
    # here is empty (xrange(0, 0)).
    for _max in xrange(0, 1100, 100):
        th = threading.Thread(target=worker, args=(xrange(_min, _max), ))
        threads.append(th)

        # For testing the uniqueness of DiskSets: every other slice gets
        # a second, identical worker so duplicates are inserted
        # concurrently.
        add_dups = not add_dups
        if add_dups:
            th = threading.Thread(target=worker, args=(xrange(_min, _max), ))
            threads.append(th)

        _min = _max

    for th in threads:
        th.start()

    for th in threads:
        th.join()

    # Every number in [0, 1000) must have made it into the set.
    for i in xrange(0, 1000):
        self.assertTrue(i in ds, i)

    # No duplicates, despite the doubled-up workers...
    ds_as_list = list(ds)
    self.assertEqual(len(ds_as_list), len(set(ds_as_list)))

    # ...and exactly the expected contents.
    ds_as_list.sort()
    self.assertEqual(ds_as_list, range(1000))
def __init__(self):
    CrawlPlugin.__init__(self)

    # Disk-backed set collecting the captcha findings made by this plugin.
    self._captchas_found = DiskSet()
def test_update(self):
    """update() must add new items and skip the ones already present."""
    disk_set = DiskSet()
    disk_set.add(1)

    # "1" is already stored and must not be duplicated.
    disk_set.update([2, 3, 1])

    self.assertEqual(list(disk_set), [1, 2, 3])
def __init__(self):
    CrawlPlugin.__init__(self)

    # Internal state.
    self._has_audited = 0
    self._analyzed_dirs = DiskSet()
def __init__(self):
    GrepPlugin.__init__(self)

    # Disk-backed set of the HTTP 500 error responses seen by this plugin.
    self._error_500_responses = DiskSet()