Example #1
    def __init__(self):
        CrawlPlugin.__init__(self)
        self._first_captured_request = True

        # User configured parameters
        self._listen_address = "127.0.0.1"
        self._listen_port = ports.SPIDERMAN
Example #2
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._exec = True
        self._release_db = os.path.join('plugins', 'crawl',
                                        'wordpress_fingerprint', 'release.db')
Example #3
    def __init__(self):
        CrawlPlugin.__init__(self)

        # internal variables
        self._exec = True
        self._already_analyzed = ScalableBloomFilter()

        # User configured parameters
        self._db_file = os.path.join('plugins', 'crawl', 'pykto',
                                     'scan_database.db')
        self._extra_db_file = os.path.join('plugins', 'crawl', 'pykto',
                                           'w3af_scan_database.db')

        self._cgi_dirs = ['/cgi-bin/']
        self._admin_dirs = ['/admin/', '/adm/']

        self._users = [
            'adm', 'bin', 'daemon', 'ftp', 'guest', 'listen', 'lp', 'mysql',
            'noaccess', 'nobody', 'nobody4', 'nuucp', 'operator', 'root',
            'smmsp', 'smtp', 'sshd', 'sys', 'test', 'unknown'
        ]

        self._nuke = [
            '/', '/postnuke/', '/postnuke/html/', '/modules/', '/phpBB/',
            '/forum/'
        ]

        self._mutate_tests = False
Example #4
    def __init__(self):
        CrawlPlugin.__init__(self)
        self._already_visited = ScalableBloomFilter()

        # User options
        self._fuzz_images = False
        self._max_digit_sections = 4
Example #5
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._exec = True
        self._release_db = os.path.join('plugins', 'crawl',
                                        'wordpress_fingerprint', 'release.db')
Example #6
    def __init__(self):
        CrawlPlugin.__init__(self)
        self._first_captured_request = True

        # User configured parameters
        self._listen_address = '127.0.0.1'
        self._listen_port = ports.SPIDERMAN
Example #7
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured variables
        self._identify_OS = True
        self._identify_applications = True

        # For testing
        self._do_fast_search = False
Example #8
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._already_crawled = ScalableBloomFilter()
        self._already_verified = ScalableBloomFilter()

        # User configured parameters
        self._max_depth = 3
Example #9
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._ghdb_file = os.path.join('plugins', 'crawl',
                                       'ghdb', 'GHDB.xml')

        # User configured variables
        self._result_limit = 300
Example #10
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured parameters
        self._wordlist = os.path.join('plugins', 'crawl', 'ria_enumerator',
                                      'common_filenames.db')

        # This is a list of common file extensions for google gears manifest:
        self._extensions = ['', '.php', '.json', '.txt', '.gears']
Example #11
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured parameters
        self._wordlist = os.path.join('plugins', 'crawl', 'ria_enumerator',
                                      'common_filenames.db')

        # This is a list of common file extensions for google gears manifest:
        self._extensions = ['', '.php', '.json', '.txt', '.gears']
Example #12
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = ScalableBloomFilter()

        # -rw-r--r--    1 andresr   w3af         8139 Apr 12 13:23 foo.zip
        regex_str = r'[a-z-]{10}\s*\d+\s*(.*?)\s+(.*?)\s+\d+\s+\w+\s+\d+\s+[0-9:]{4,5}\s+(.*)'
        self._listing_parser_re = re.compile(regex_str)
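For reference, the listing-parser pattern above can be exercised directly against the sample line quoted in its own comment. The short, self-contained sketch below does exactly that; the labels owner, group and filename are my reading of the three capture groups, not names taken from the plugin.

    import re

    # Same pattern as in the example, written as a raw string
    listing_parser_re = re.compile(
        r'[a-z-]{10}\s*\d+\s*(.*?)\s+(.*?)\s+\d+\s+\w+\s+\d+\s+[0-9:]{4,5}\s+(.*)')

    # The sample "ls -l" style line from the plugin's comment
    line = '-rw-r--r--    1 andresr   w3af         8139 Apr 12 13:23 foo.zip'

    match = listing_parser_re.search(line)
    if match:
        owner, group, filename = match.groups()
        print((owner, group, filename))  # ('andresr', 'w3af', 'foo.zip')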
Example #13
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._already_crawled = ScalableBloomFilter()
        self._already_verified = ScalableBloomFilter()

        # User configured parameters
        self._max_depth = 3
Example #14
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = ScalableBloomFilter()

        # -rw-r--r--    1 andresr   w3af         8139 Apr 12 13:23 foo.zip
        regex_str = r'[a-z-]{10}\s*\d+\s*(.*?)\s+(.*?)\s+\d+\s+\w+\s+\d+\s+[0-9:]{4,5}\s+(.*)'
        self._listing_parser_re = re.compile(regex_str)
Example #15
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = ScalableBloomFilter()
        self._analyzed_filenames = ScalableBloomFilter()

        self._dvcs = {}
        self._dvcs['git repository'] = {}
        self._dvcs['git ignore'] = {}
        self._dvcs['hg repository'] = {}
        self._dvcs['hg ignore'] = {}
        self._dvcs['bzr repository'] = {}
        self._dvcs['bzr ignore'] = {}
        self._dvcs['svn repository'] = {}
        self._dvcs['svn ignore'] = {}
        self._dvcs['cvs repository'] = {}
        self._dvcs['cvs ignore'] = {}

        self._dvcs['git repository']['filename'] = '.git/index'
        self._dvcs['git repository']['function'] = self.git_index

        self._dvcs['git ignore']['filename'] = '.gitignore'
        self._dvcs['git ignore']['function'] = self.ignore_file

        self._dvcs['hg repository']['filename'] = '.hg/dirstate'
        self._dvcs['hg repository']['function'] = self.hg_dirstate

        self._dvcs['hg ignore']['filename'] = '.hgignore'
        self._dvcs['hg ignore']['function'] = self.ignore_file

        self._dvcs['bzr repository']['filename'] = '.bzr/checkout/dirstate'
        self._dvcs['bzr repository']['function'] = self.bzr_checkout_dirstate

        self._dvcs['bzr ignore']['filename'] = '.bzrignore'
        self._dvcs['bzr ignore']['function'] = self.ignore_file

        self._dvcs['svn repository']['filename'] = '.svn/entries'
        self._dvcs['svn repository']['function'] = self.svn_entries

        self._dvcs['svn ignore']['filename'] = '.svnignore'
        self._dvcs['svn ignore']['function'] = self.ignore_file

        self._dvcs['cvs repository']['filename'] = 'CVS/Entries'
        self._dvcs['cvs repository']['function'] = self.cvs_entries

        self._dvcs['cvs ignore']['filename'] = '.cvsignore'
        self._dvcs['cvs ignore']['function'] = self.ignore_file
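The example above builds a mapping from a human-readable key to a well-known repository file and the handler that parses it. The self-contained sketch below shows how such a mapping is typically consumed: each filename is joined onto a base directory URL and the matching handler is dispatched. The base URL and the stub handlers are illustrative only; the real plugin fetches each URL over HTTP and parses the response body.

    try:
        from urllib.parse import urljoin   # Python 3
    except ImportError:
        from urlparse import urljoin       # Python 2

    # Illustrative stand-ins for the plugin's real parsers
    def handle_git_index(url):
        print('would fetch and parse a git index at %s' % url)

    def handle_ignore_file(url):
        print('would fetch and parse an ignore file at %s' % url)

    dvcs = {
        'git repository': {'filename': '.git/index', 'function': handle_git_index},
        'git ignore': {'filename': '.gitignore', 'function': handle_ignore_file},
    }

    # Hypothetical base directory being crawled
    base = 'http://target.example/app/'
    for name, entry in dvcs.items():
        entry['function'](urljoin(base, entry['filename']))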
Example #16
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = ScalableBloomFilter()
        self._analyzed_filenames = ScalableBloomFilter()

        self._dvcs = {}
        self._dvcs['git repository'] = {}
        self._dvcs['git ignore'] = {}
        self._dvcs['hg repository'] = {}
        self._dvcs['hg ignore'] = {}
        self._dvcs['bzr repository'] = {}
        self._dvcs['bzr ignore'] = {}
        self._dvcs['svn repository'] = {}
        self._dvcs['svn ignore'] = {}
        self._dvcs['cvs repository'] = {}
        self._dvcs['cvs ignore'] = {}

        self._dvcs['git repository']['filename'] = '.git/index'
        self._dvcs['git repository']['function'] = self.git_index

        self._dvcs['git ignore']['filename'] = '.gitignore'
        self._dvcs['git ignore']['function'] = self.ignore_file

        self._dvcs['hg repository']['filename'] = '.hg/dirstate'
        self._dvcs['hg repository']['function'] = self.hg_dirstate

        self._dvcs['hg ignore']['filename'] = '.hgignore'
        self._dvcs['hg ignore']['function'] = self.ignore_file

        self._dvcs['bzr repository']['filename'] = '.bzr/checkout/dirstate'
        self._dvcs['bzr repository']['function'] = self.bzr_checkout_dirstate

        self._dvcs['bzr ignore']['filename'] = '.bzrignore'
        self._dvcs['bzr ignore']['function'] = self.ignore_file

        self._dvcs['svn repository']['filename'] = '.svn/entries'
        self._dvcs['svn repository']['function'] = self.svn_entries

        self._dvcs['svn ignore']['filename'] = '.svnignore'
        self._dvcs['svn ignore']['function'] = self.ignore_file

        self._dvcs['cvs repository']['filename'] = 'CVS/Entries'
        self._dvcs['cvs repository']['function'] = self.cvs_entries

        self._dvcs['cvs ignore']['filename'] = '.cvsignore'
        self._dvcs['cvs ignore']['function'] = self.ignore_file
Example #17
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured parameters
        self._dir_list = os.path.join('plugins', 'crawl', 'dir_file_bruter',
                                      'common_dirs_small.db')
        self._file_list = os.path.join('plugins', 'crawl', 'dir_file_bruter',
                                       'common_files_small.db')

        self._bf_directories = True
        self._bf_files = False
        self._be_recursive = False

        # Internal variables
        self._exec = True
        self._already_tested = DiskSet()
Example #18
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured parameters
        self._wordlist = os.path.join('plugins', 'crawl',
                                      'content_negotiation',
                                      'common_filenames.db')

        # Internal variables
        self._already_tested_dir = ScalableBloomFilter()
        self._already_tested_resource = ScalableBloomFilter()
        self._content_negotiation_enabled = None
        self._to_bruteforce = Queue.Queue()
        # I want to try 3 times to see if the remote host is vulnerable
        # detection is not thaaat accurate!
        self._tries_left = 3
Example #19
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured parameters
        self._wordlist = os.path.join(
            'plugins', 'crawl', 'content_negotiation',
            'common_filenames.db')

        # Internal variables
        self._already_tested_dir = ScalableBloomFilter()
        self._already_tested_resource = ScalableBloomFilter()
        self._content_negotiation_enabled = None
        self._to_bruteforce = Queue.Queue()
        # I want to try 3 times to see if the remote host is vulnerable
        # detection is not thaaat accurate!
        self._tries_left = 3
Example #20
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._compiled_ignore_re = None
        self._compiled_follow_re = None
        self._broken_links = DiskSet()
        self._first_run = True
        self._known_variants = VariantDB()
        self._already_filled_form = ScalableBloomFilter()

        # User configured variables
        self._ignore_regex = ""
        self._follow_regex = ".*"
        self._only_forward = False
        self._compile_re()
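The example above ends by calling self._compile_re(), which turns the user-supplied ignore/follow strings into the compiled patterns stored in the two _compiled_*_re attributes. The snippet below is a minimal, self-contained sketch of that idea, not w3af's actual implementation: an empty ignore pattern is mapped to a regex that can never match, and a URL is considered crawlable only when it matches follow and does not match ignore.

    import re

    def compile_filters(ignore_regex, follow_regex):
        # '(?!x)x' never matches anything, so an empty ignore string ignores nothing
        ignore_re = re.compile(ignore_regex if ignore_regex else '(?!x)x')
        follow_re = re.compile(follow_regex)
        return ignore_re, follow_re

    ignore_re, follow_re = compile_filters('', '.*')
    url = 'http://target.example/index.html'
    crawlable = bool(follow_re.match(url)) and not ignore_re.match(url)
    print(crawlable)  # True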
Example #21
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._compiled_ignore_re = None
        self._compiled_follow_re = None
        self._broken_links = DiskSet()
        self._first_run = True
        self._known_variants = VariantDB()
        self._already_filled_form = ScalableBloomFilter()

        # User configured variables
        self._ignore_regex = ''
        self._follow_regex = '.*'
        self._only_forward = False
        self._compile_re()
Example #22
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._first = True
        self._start_path = None

        self._not_exist_remote = []
        self._exist_remote = []

        self._not_eq_content = []
        self._eq_content = []

        # Configuration
        self._ban_url = ['asp', 'jsp', 'php']
        self._content = True
        self._local_dir = ''
        self._remote_url_path = URL('http://host.tld/')
Example #23
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._first = True
        self._start_path = None

        self._not_exist_remote = []
        self._exist_remote = []

        self._not_eq_content = []
        self._eq_content = []

        # Configuration
        self._ban_url = ['asp', 'jsp', 'php']
        self._content = True
        self._local_dir = ''
        self._remote_url_path = URL('http://host.tld/')
Example #24
    def __init__(self):
        CrawlPlugin.__init__(self)

        # internal variables
        self._exec = True
        self._already_analyzed = ScalableBloomFilter()

        # User configured parameters
        self._db_file = os.path.join("plugins", "crawl", "pykto", "scan_database.db")
        self._extra_db_file = os.path.join("plugins", "crawl", "pykto", "w3af_scan_database.db")

        self._cgi_dirs = ["/cgi-bin/"]
        self._admin_dirs = ["/admin/", "/adm/"]

        self._users = [
            "adm",
            "bin",
            "daemon",
            "ftp",
            "guest",
            "listen",
            "lp",
            "mysql",
            "noaccess",
            "nobody",
            "nobody4",
            "nuucp",
            "operator",
            "root",
            "smmsp",
            "smtp",
            "sshd",
            "sys",
            "test",
            "unknown",
        ]

        self._nuke = ["/", "/postnuke/", "/postnuke/html/", "/modules/", "/phpBB/", "/forum/"]

        self._mutate_tests = False
Example #25
    def __init__(self):
        CrawlPlugin.__init__(self)

        # internal variables
        self._exec = True
        self._already_analyzed = ScalableBloomFilter()

        # User configured parameters
        self._extra_db_file = os.path.join('plugins', 'crawl', 'pykto',
                                           'w3af_scan_database.db')

        self._cgi_dirs = ['/cgi-bin/']
        self._admin_dirs = ['/admin/', '/adm/']

        self._users = ['adm', 'bin', 'daemon', 'ftp', 'guest', 'listen', 'lp',
                       'mysql', 'noaccess', 'nobody', 'nobody4', 'nuucp', 'operator',
                       'root', 'smmsp', 'smtp', 'sshd', 'sys', 'test', 'unknown']

        self._nuke = ['/', '/postnuke/', '/postnuke/html/', '/modules/', '/phpBB/',
                      '/forum/']

        self._mutate_tests = False
Example #26
    def __init__(self):
        CrawlPlugin.__init__(self)
Example #27
    def __init__(self):
        CrawlPlugin.__init__(self)

        self.count = 0
        self.loops = 20
Example #28
    def __init__(self):
        CrawlPlugin.__init__(self)

        self._captchas_found = DiskSet()
Example #29
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._already_tested = ScalableBloomFilter()
Example #30
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = ScalableBloomFilter()
Example #31
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User defined parameters
        self._wordnet_results = 5
Example #32
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._exec = True
Example #33
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._exec = True
Example #34
    def __init__(self):
        CrawlPlugin.__init__(self)

        self.count = 0
        self.loops = 20
Example #35
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User variables
        self._result_limit = 300
Example #36
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured parameters
        self._input_csv = ''
        self._input_burp = ''
Example #37
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User defined parameters
        self._wordnet_results = 5
Example #38
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = DiskSet()
        self._has_audited = 0
Example #39
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._already_tested = ScalableBloomFilter()
Example #40
    def __init__(self):
        CrawlPlugin.__init__(self)

        self._captchas_found = DiskSet()
Example #41
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User configured parameters
        self._input_csv = ''
        self._input_burp = ''
Example #42
    def __init__(self):
        CrawlPlugin.__init__(self)

        # User variables
        self._result_limit = 300
Example #43
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = ScalableBloomFilter()
Example #44
    def __init__(self):
        CrawlPlugin.__init__(self)

        # Internal variables
        self._analyzed_dirs = DiskSet()
        self._has_audited = 0
Example #45
    def __init__(self):
        CrawlPlugin.__init__(self)

        self._first_time = True
        self._fuzz_images = False
        self._seen = ScalableBloomFilter()