Example #1
import os
from collections import deque
from hashlib import md5
from urllib.parse import urlparse

# Project-local dependencies; these import paths are assumptions based on
# the Wapiti project layout and may differ between versions:
from wapitiCore.net import crawler
from wapitiCore.net.sqlite_persister import SqlitePersister


class Wapiti:  # enclosing class name assumed; the snippet shows only the method
    def __init__(self,
                 root_url,
                 scope="folder",
                 session_dir=None,
                 config_dir=None):
        self.target_url = root_url
        self.server = urlparse(root_url).netloc
        self.crawler = crawler.Crawler(root_url)

        # Map the scope keyword onto the crawler's Scope enum; any
        # unrecognized value falls back to the strict URL scope.
        self.target_scope = scope
        if scope == "page":
            self.crawler.scope = crawler.Scope.PAGE
        elif scope == "folder":
            self.crawler.scope = crawler.Scope.FOLDER
        elif scope == "domain":
            self.crawler.scope = crawler.Scope.DOMAIN
        elif scope == "punk":
            self.crawler.scope = crawler.Scope.PUNK
        else:
            self.crawler.scope = crawler.Scope.URL

        # Reporting defaults: no generator instantiated yet, HTML output,
        # no output file chosen.
        self.report_gen = None
        self.report_generator_type = "html"
        self.output_file = ""

        self.urls = []
        self.forms = []
        self.attacks = []

        # Display flags, per-module options, and crawl/attack limits.
        self.color = 0
        self.verbose = 0
        self.module_options = None
        self.attack_options = {}
        self._start_urls = deque([self.target_url])
        self._excluded_urls = []
        self._bad_params = set()
        self._max_depth = 40
        self._max_links_per_page = 0
        self._max_files_per_dir = 0
        self._scan_force = "normal"
        self._max_scan_time = 0
        self._max_attack_time = 0
        self._bug_report = True

        # Optional overrides for where session data and configuration live.
        if session_dir:
            SqlitePersister.CRAWLER_DATA_DIR = session_dir

        if config_dir:
            SqlitePersister.CONFIG_DIR = config_dir

        # One SQLite history file per target, named
        # <netloc>_<scope>_<first 8 hex chars of md5(root_url)>.db
        self._history_file = os.path.join(
            SqlitePersister.CRAWLER_DATA_DIR, "{}_{}_{}.db".format(
                self.server.replace(':', '_'), self.target_scope,
                md5(root_url.encode(errors="replace")).hexdigest()[:8]))

        if not os.path.isdir(SqlitePersister.CRAWLER_DATA_DIR):
            os.makedirs(SqlitePersister.CRAWLER_DATA_DIR)

        self.persister = SqlitePersister(self._history_file)
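
A minimal usage sketch for the constructor above; the class name Wapiti, the target URL, and the directory paths are illustrative assumptions that do not appear in the original snippet:

# Hypothetical usage; none of these values come from the snippet itself.
scanner = Wapiti(
    "http://example.com/app/",
    scope="folder",
    session_dir="/tmp/wapiti_sessions",  # overrides SqlitePersister.CRAWLER_DATA_DIR
    config_dir="/tmp/wapiti_config")     # overrides SqlitePersister.CONFIG_DIR
# scanner.persister is now an open SqlitePersister backed by a per-target
# SQLite file such as example.com_folder_<md5 prefix>.db

Note the ordering in the constructor: session_dir is applied to SqlitePersister.CRAWLER_DATA_DIR before the history file path is computed, so the database lands under the overridden location.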
Example #2
import os
from collections import deque
from hashlib import md5
from urllib.parse import urlparse

# Project-local dependencies; these import paths are assumptions based on
# the Wapiti project layout and may differ between versions:
from wapitiCore.net import crawler
from wapitiCore.net.sqlite_persister import SqlitePersister
from wapitiCore.file.reportgeneratorsxmlparser import ReportGeneratorsXMLParser

# CONF_DIR is defined elsewhere in the project; a plausible stand-in,
# assumed to point at the directory holding Wapiti's bundled configuration:
CONF_DIR = os.path.dirname(os.path.abspath(__file__))


class Wapiti:  # enclosing class name assumed; the snippet shows only the method
    def __init__(self, root_url, scope="folder"):
        self.target_url = root_url
        self.server = urlparse(root_url).netloc
        self.crawler = crawler.Crawler(root_url)

        # Same scope mapping as Example #1, but this older variant has no
        # "punk" scope; unrecognized values fall back to the strict URL scope.
        self.target_scope = scope
        if scope == "page":
            self.crawler.scope = crawler.Scope.PAGE
        elif scope == "folder":
            self.crawler.scope = crawler.Scope.FOLDER
        elif scope == "domain":
            self.crawler.scope = crawler.Scope.DOMAIN
        else:
            self.crawler.scope = crawler.Scope.URL

        # Reporting setup: here the list of available report generators is
        # parsed from generators.xml at construction time.
        self.report_gen = None
        self.report_generator_type = "html"
        self.xml_rep_gen_parser = ReportGeneratorsXMLParser()
        self.xml_rep_gen_parser.parse(
            os.path.join(CONF_DIR, "config", "reports", "generators.xml"))
        self.output_file = ""

        self.urls = []
        self.forms = []
        self.attacks = []

        # Display flags, per-module options, and crawl limits.
        self.color = 0
        self.verbose = 0
        self.module_options = None
        self.attack_options = {}
        self._start_urls = deque([self.target_url])
        self._excluded_urls = []
        self._bad_params = set()
        self._max_depth = 40
        self._max_links_per_page = -1
        self._max_files_per_dir = 0
        self._scan_force = "normal"
        self._max_scan_time = 0

        # Same history-file naming scheme as Example #1:
        # <netloc>_<scope>_<first 8 hex chars of md5(root_url)>.db
        history_file = os.path.join(
            SqlitePersister.CRAWLER_DATA_DIR, "{}_{}_{}.db".format(
                self.server.replace(':', '_'), self.target_scope,
                md5(root_url.encode(errors="replace")).hexdigest()[:8]))
        self._bug_report = True

        self.persister = SqlitePersister(history_file)
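
For comparison, a sketch of the older two-argument signature; the class name and URL are again illustrative assumptions:

# Hypothetical usage; this version has no session_dir/config_dir parameters,
# so the history database always lands in the default
# SqlitePersister.CRAWLER_DATA_DIR.
scanner = Wapiti("http://example.com/app/", scope="domain")

Besides the narrower signature, this variant parses generators.xml eagerly, does not create CRAWLER_DATA_DIR if it is missing, and lacks the _max_attack_time setting that Example #1 added.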