def __init__(self):
    """Initialize the crawl state and user-configurable options."""
    CrawlPlugin.__init__(self)

    # Internal state: URLs that were already requested
    self._already_visited = ScalableBloomFilter()

    # User options
    self._fuzz_images = False
    self._max_digit_sections = 4
def __init__(self):
    """Initialize internal flags, header cache and dedup filter."""
    CrawlPlugin.__init__(self)

    # Internal state
    self._headers = None        # set later; None until then
    self._first_time = True
    self._seen = ScalableBloomFilter()

    # User option
    self._fuzz_images = False
def __init__(self):
    """Initialize run flag and the WordPress release database path."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._exec = True
    self._release_db = os.path.join(ROOT_PATH, 'plugins', 'crawl',
                                    'wordpress_fingerprint', 'release.db')
def __init__(self):
    """Initialize proxy defaults for the spider-man plugin."""
    CrawlPlugin.__init__(self)

    # Internal state
    self._first_captured_request = True

    # User configured parameters
    self._listen_address = '127.0.0.1'
    self._listen_port = ports.SPIDERMAN
def __init__(self):
    """Initialize pykto scan state and user-configurable databases."""
    CrawlPlugin.__init__(self)

    # internal variables
    self._exec = True
    self._already_analyzed = ScalableBloomFilter()

    # User configured parameters
    self._db_file = os.path.join(ROOT_PATH, 'plugins', 'crawl', 'pykto',
                                 'scan_database.db')
    self._extra_db_file = os.path.join(ROOT_PATH, 'plugins', 'crawl',
                                       'pykto', 'w3af_scan_database.db')

    # Replacement values used when expanding the scan database entries
    self._cgi_dirs = ['/cgi-bin/']
    self._admin_dirs = ['/admin/', '/adm/']
    self._users = ['adm', 'bin', 'daemon', 'ftp', 'guest', 'listen', 'lp',
                   'mysql', 'noaccess', 'nobody', 'nobody4', 'nuucp',
                   'operator', 'root', 'smmsp', 'smtp', 'sshd', 'sys',
                   'test', 'unknown']
    self._nuke = ['/', '/postnuke/', '/postnuke/html/', '/modules/',
                  '/phpBB/', '/forum/']
    self._mutate_tests = False
def __init__(self):
    """Initialize the GHDB database path and the result limit."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._ghdb_file = os.path.join(ROOT_PATH, 'plugins', 'crawl',
                                   'ghdb', 'GHDB.xml')

    # User configured variables
    self._result_limit = 300
def __init__(self):
    """Initialize identification flags and the test-only fast-search switch."""
    CrawlPlugin.__init__(self)

    # User configured variables
    self._identify_OS = True
    self._identify_applications = True

    # For testing
    self._do_fast_search = False
def __init__(self):
    """Initialize the wordlist path and the candidate manifest extensions."""
    CrawlPlugin.__init__(self)

    # User configured parameters
    self._wordlist = os.path.join(ROOT_PATH, 'plugins', 'crawl',
                                  'ria_enumerator', 'common_filenames.db')

    # This is a list of common file extensions for google gears manifest:
    self._extensions = ['', '.php', '.json', '.txt', '.gears']
def __init__(self):
    """Initialize dedup filters and the maximum crawl depth."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._already_crawled = ScalableBloomFilter()
    self._already_verified = ScalableBloomFilter()

    # User configured parameters
    self._max_depth = 3
def __init__(self):
    """Initialize the dir cache and compile the directory-listing parser.

    Fix: the regex is now a raw string. The original used a plain string,
    so '\\s', '\\d' and '\\w' were invalid escape sequences (a
    DeprecationWarning on modern CPython, SyntaxWarning since 3.12).
    The compiled pattern is byte-identical.
    """
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = ScalableBloomFilter()

    # Parses listing lines such as:
    # -rw-r--r-- 1 andresr w3af 8139 Apr 12 13:23 foo.zip
    regex_str = r'[a-z-]{10}\s*\d+\s*(.*?)\s+(.*?)\s+\d+\s+\w+\s+\d+\s+[0-9:]{4,5}\s+(.*)'
    self._listing_parser_re = re.compile(regex_str)
def __init__(self):
    """Initialize regex holders, broken-link storage and target tracking."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._compiled_ignore_re = None
    self._compiled_follow_re = None
    self._broken_links = DiskSet(table_prefix='hidden_payment_gateway')
    self._first_run = True
    self._target_urls = []
    self._target_domain = None
def __init__(self):
    """Initialize the phpinfo crawl state.

    CHANGELOG:
    Feb/17/2009 - Added PHP Settings Audit Checks by Aung Khant
    (aungkhant[at]yehg.net)
    """
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = DiskSet(table_prefix='phpinfo')
    self._has_audited = 0
def __init__(self):
    """Initialize the phpinfo crawl state.

    CHANGELOG:
    Feb/17/2009 - Added PHP Settings Audit Checks by Aung Khant
    (aungkhant[at]yehg.net)
    """
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = DiskSet()
    self._has_audited = 0
def __init__(self):
    """Initialize open_api crawl state and its user-configurable options."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._first_run = True
    self._already_analyzed = DiskSet(table_prefix='open_api')

    # User configured variables
    self._query_string_auth = ''
    self._header_auth = ''
    self._no_spec_validation = False
    self._custom_spec_location = ''
    self._discover_fuzzable_headers = True
def __init__(self):
    """Initialize brute-force wordlists, toggles, and dedup storage."""
    CrawlPlugin.__init__(self)

    # User configured parameters
    self._dir_list = os.path.join(self.BASE_PATH, 'common_dirs_small.db')
    self._file_list = os.path.join(self.BASE_PATH, 'common_files_small.db')

    self._bf_directories = True
    self._bf_files = False
    self._be_recursive = False

    # Internal variables
    self._exec = True
    self._already_tested = DiskSet(table_prefix='dir_file_bruter')
def __init__(self):
    """Initialize content-negotiation bruteforce state and wordlist."""
    CrawlPlugin.__init__(self)

    # User configured parameters
    self._wordlist = os.path.join(ROOT_PATH, "plugins", "crawl",
                                  "content_negotiation",
                                  "common_filenames.db")

    # Internal variables
    self._already_tested_dir = ScalableBloomFilter()
    self._already_tested_resource = ScalableBloomFilter()
    self._content_negotiation_enabled = None
    self._to_bruteforce = Queue.Queue()

    # I want to try 3 times to see if the remote host is vulnerable
    # detection is not thaaaat accurate!
    self._tries_left = 3
def __init__(self):
    """Initialize dedup filters and the DVCS detection table."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = ScalableBloomFilter()
    self._analyzed_filenames = ScalableBloomFilter()

    # Map each check name to the telltale file to request and the
    # callable that analyzes a positive response.
    checks = [
        ('git repository', '.git/index', self.git_index),
        ('git ignore', '.gitignore', self.ignore_file),
        ('hg repository', '.hg/dirstate', self.hg_dirstate),
        ('hg ignore', '.hgignore', self.ignore_file),
        ('bzr repository', '.bzr/checkout/dirstate',
         self.bzr_checkout_dirstate),
        ('bzr ignore', '.bzrignore', self.ignore_file),
        ('svn repository', '.svn/entries', self.svn_entries),
        ('svn ignore', '.svnignore', self.ignore_file),
        ('cvs repository', 'CVS/Entries', self.cvs_entries),
        ('cvs ignore', '.cvsignore', self.ignore_file),
    ]

    self._dvcs = {}
    for name, filename, function in checks:
        self._dvcs[name] = {'filename': filename,
                            'function': function}
def __init__(self):
    """Initialize dedup filters and the DVCS detection table.

    Improvement: the original populated self._dvcs with ~30 repetitive
    one-key-at-a-time assignments. The table is now built from a single
    data-driven list; the resulting dict contents are identical.
    """
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = ScalableBloomFilter()
    self._analyzed_filenames = ScalableBloomFilter()

    # (check name, telltale file to request, response analysis callable)
    checks = [
        ('git repository', '.git/index', self.git_index),
        ('git ignore', '.gitignore', self.ignore_file),
        ('hg repository', '.hg/dirstate', self.hg_dirstate),
        ('hg ignore', '.hgignore', self.ignore_file),
        ('bzr repository', '.bzr/checkout/dirstate',
         self.bzr_checkout_dirstate),
        ('bzr ignore', '.bzrignore', self.ignore_file),
        ('svn repository', '.svn/entries', self.svn_entries),
        ('svn ignore', '.svnignore', self.ignore_file),
        ('cvs repository', 'CVS/Entries', self.cvs_entries),
        ('cvs ignore', '.cvsignore', self.ignore_file),
    ]

    self._dvcs = {}
    for name, filename, function in checks:
        self._dvcs[name] = {'filename': filename,
                            'function': function}
def __init__(self):
    """Initialize spider state, dedup storage, and follow/ignore regexes."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._compiled_ignore_re = None
    self._compiled_follow_re = None
    self._broken_links = DiskSet()
    self._first_run = True
    self._known_variants = VariantDB()
    self._already_filled_form = ScalableBloomFilter()

    # User configured variables
    self._ignore_regex = ''
    self._follow_regex = '.*'
    self._only_forward = False

    # Pre-compile the user regexes
    self._compile_re()
def __init__(self):
    """Initialize content-negotiation bruteforce state and wordlist."""
    CrawlPlugin.__init__(self)

    # User configured parameters
    self._wordlist = os.path.join(ROOT_PATH, 'plugins', 'crawl',
                                  'content_negotiation',
                                  'common_filenames.db')

    # Internal variables
    self._already_tested_dir = ScalableBloomFilter()
    self._already_tested_resource = ScalableBloomFilter()
    self._content_negotiation_enabled = None
    self._to_bruteforce = Queue.Queue()

    # I want to try 3 times to see if the remote host is vulnerable
    # detection is not thaaaat accurate!
    self._tries_left = 3
def __init__(self):
    """Initialize web_spider state, dedup storage, and user regexes."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._compiled_ignore_re = None
    self._compiled_follow_re = None
    self._broken_links = DiskSet(table_prefix='web_spider')
    self._first_run = True
    self._target_urls = []
    self._target_domain = None
    self._already_filled_form = ScalableBloomFilter()
    self._variant_db = VariantDB()

    # User configured variables
    self._ignore_regex = ''
    self._follow_regex = '.*'
    self._only_forward = False

    # Pre-compile the user regexes
    self._compile_re()
def __init__(self):
    """Initialize local/remote comparison state and configuration."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._first = True
    self._start_path = None

    # Result buckets for the comparison
    self._not_exist_remote = []
    self._exist_remote = []
    self._not_eq_content = []
    self._eq_content = []

    # Configuration
    self._ban_url = ['asp', 'jsp', 'php']
    self._content = True
    self._local_dir = ''
    self._remote_url_path = URL('http://host.tld/')
def __init__(self):
    """Initialize dedup filters and the ordered list of DVCS tests."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = ScalableBloomFilter()
    self._analyzed_filenames = ScalableBloomFilter()

    # Each DVCSTest pairs a telltale file with a human-readable name
    # and the callable that analyzes a positive response.
    self._dvcs = [
        DVCSTest('.git/index', 'git repository', self.git_index),
        DVCSTest('.gitignore', 'git ignore', self.ignore_file),
        DVCSTest('.hg/dirstate', 'hg repository', self.hg_dirstate),
        DVCSTest('.hgignore', 'hg ignore', self.ignore_file),
        DVCSTest('.bzr/checkout/dirstate', 'bzr repository',
                 self.bzr_checkout_dirstate),
        DVCSTest('.bzrignore', 'bzr ignore', self.ignore_file),
        DVCSTest('.svn/entries', 'svn repository', self.svn_entries),
        DVCSTest('.svn/wc.db', 'svn repository db', self.svn_wc_db),
        DVCSTest('.svnignore', 'svn ignore', self.ignore_file),
        DVCSTest('CVS/Entries', 'cvs repository', self.cvs_entries),
        DVCSTest('.cvsignore', 'cvs ignore', self.ignore_file),
    ]
def __init__(self):
    """Initialize content-negotiation bruteforce state and wordlist."""
    CrawlPlugin.__init__(self)

    # User configured parameters
    self._wordlist = os.path.join(ROOT_PATH, 'plugins', 'crawl',
                                  'content_negotiation',
                                  'common_filenames.db')

    # Internal variables
    self._already_tested_dir = ScalableBloomFilter()
    self._already_tested_resource = ScalableBloomFilter()

    # Test queue
    #
    # Note that this queue can have ~20 items in the worse case scenario
    # it is not a risk to store it all in memory
    self._to_bruteforce = Queue.Queue()

    # Run N checks to verify if content negotiation is enabled
    self._tries_left = 3
    self._content_negotiation_enabled = None
def __init__(self):
    """Initialize pykto scan state and user-configurable databases."""
    CrawlPlugin.__init__(self)

    # internal variables
    self._exec = True
    self._already_analyzed = ScalableBloomFilter()

    # User configured parameters
    self._db_file = os.path.join(ROOT_PATH, "plugins", "crawl", "pykto",
                                 "scan_database.db")
    self._extra_db_file = os.path.join(ROOT_PATH, "plugins", "crawl",
                                       "pykto", "w3af_scan_database.db")

    # Replacement values used when expanding the scan database entries
    self._cgi_dirs = ["/cgi-bin/"]
    self._admin_dirs = ["/admin/", "/adm/"]
    self._users = ["adm", "bin", "daemon", "ftp", "guest", "listen", "lp",
                   "mysql", "noaccess", "nobody", "nobody4", "nuucp",
                   "operator", "root", "smmsp", "smtp", "sshd", "sys",
                   "test", "unknown"]
    self._nuke = ["/", "/postnuke/", "/postnuke/html/", "/modules/",
                  "/phpBB/", "/forum/"]
    self._mutate_tests = False
def __init__(self):
    """Initialize the phpinfo analyzed-directory storage and audit flag."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = DiskSet(table_prefix='phpinfo')
    self._has_audited = False
def __init__(self):
    """Initialize the user-configurable result limit."""
    CrawlPlugin.__init__(self)

    # User variables
    self._result_limit = 300
def __init__(self):
    """Initialize the analyzed-dirs filter and signature regex holder."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = ScalableBloomFilter()
    self._signature_re = None       # compiled lazily elsewhere
def __init__(self):
    """Initialize the dedup filter for already-tested items."""
    CrawlPlugin.__init__(self)

    self._already_tested = ScalableBloomFilter()
def __init__(self):
    """Initialize the multi-string matcher holder (set up elsewhere)."""
    CrawlPlugin.__init__(self)

    self._multi_in = None
def __init__(self):
    """Initialize the run-once execution flag."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._exec = True
def __init__(self):
    """Delegate all initialization to the CrawlPlugin base class."""
    CrawlPlugin.__init__(self)
def __init__(self):
    """Initialize the analyzed-directories dedup filter."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = ScalableBloomFilter()
def __init__(self):
    """Initialize disk-backed storage for discovered captchas."""
    CrawlPlugin.__init__(self)

    self._captchas_found = DiskSet(table_prefix='find_captchas')
def __init__(self):
    """Initialize the analyzed-directory storage and audit counter."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._analyzed_dirs = DiskSet()
    self._has_audited = 0
def __init__(self):
    """Initialize the user-configurable input file locations."""
    CrawlPlugin.__init__(self)

    # User configured parameters
    self._input_csv = ''
    self._input_burp = ''
def __init__(self):
    """Initialize the ESM check holder (populated elsewhere)."""
    CrawlPlugin.__init__(self)

    self._to_check_esm = None
def __init__(self):
    """Initialize the user-defined number of wordnet results."""
    CrawlPlugin.__init__(self)

    # User defined parameters
    self._wordnet_results = 5
def __init__(self):
    """Initialize the iteration counter and loop limit."""
    CrawlPlugin.__init__(self)

    self.count = 0
    self.loops = 20
def __init__(self):
    """Initialize the dedup filter for already-tested items."""
    CrawlPlugin.__init__(self)

    # Internal variables
    self._already_tested = ScalableBloomFilter()