Example #1
  def __init__(self, root):
    error_str = ""
    self.myls = lswww.lswww(root)
    self.root = self.myls.root
    self.server = urlparse.urlparse(self.root)[1]
    self.myls.verbosity(1)
    socket.setdefaulttimeout(self.timeout)

    # HttpLib2 vars
    proxy = None

    if self.proxy != "":
      (proxy_type, proxy_usr, proxy_pwd, proxy_host, proxy_port,
          path, query, fragment) = httplib2.parse_proxy(self.proxy)
      proxy = httplib2.ProxyInfo(proxy_type, proxy_host, proxy_port,
          proxy_user=proxy_usr, proxy_pass=proxy_pwd)

    self.cookiejar = libcookie.libcookie(self.server)

    self.h = httplib2.Http(cache=None, timeout=self.timeout, proxy_info=proxy)
    self.h.follow_redirects = False

    if self.auth_basic != []:
      self.h.add_credentials(self.auth_basic[0], self.auth_basic[1])
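
The constructor above only prepares the httplib2.Http client. Below is a minimal, standalone sketch of how a client configured this way could be used; the URL, proxy string, timeout and credentials are placeholders (not values from the original class), and the request call follows httplib2's documented (response, content) return convention.

# Minimal usage sketch (assumptions: httplib2 installed; URL, proxy and
# credentials below are placeholders, not values from the original class).
import httplib2

proxy = None
proxy_url = ""  # e.g. "http://user:pwd@127.0.0.1:8118/" (hypothetical)

if proxy_url != "":
    # Same parsing call used in the example above.
    (proxy_type, proxy_usr, proxy_pwd, proxy_host, proxy_port,
     path, query, fragment) = httplib2.parse_proxy(proxy_url)
    proxy = httplib2.ProxyInfo(proxy_type, proxy_host, proxy_port,
                               proxy_user=proxy_usr, proxy_pass=proxy_pwd)

h = httplib2.Http(cache=None, timeout=6, proxy_info=proxy)
h.follow_redirects = False
h.add_credentials("user", "secret")  # optional HTTP Basic auth

# httplib2 returns a (response, content) pair for each request.
response, content = h.request("http://example.com/", "GET")
print response.status, len(content)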
Example #2
    def go(self, crawlerFile):
        proxy = None

        if self.proxy != "":
            (proxy_type, proxy_usr, proxy_pwd, proxy_host, proxy_port,
             path, query, fragment) = httplib2.parse_proxy(self.proxy)
            proxy = httplib2.ProxyInfo(proxy_type, proxy_host, proxy_port,
                                       proxy_user=proxy_usr, proxy_pass=proxy_pwd)

        self.h = httplib2.Http(cache=None, timeout=self.timeout,
                               proxy_info=proxy)
        self.h.follow_redirects = False

        self.cookiejar = libcookie.libcookie(self.server)
        if os.path.isfile(self.cookie):
            self.cookiejar.loadfile(self.cookie)

        if self.auth_basic != []:
            self.h.add_credentials(self.auth_basic[0], self.auth_basic[1])

        # Load the crawler state if a file was passed in.
        if crawlerFile is not None:
            if self.persister.isDataForUrl(crawlerFile) == 1:
                self.persister.loadXML(crawlerFile)
                self.tobrowse = self.persister.getToBrose()
                # TODO: change xml file for browsed urls
                self.browsed = self.persister.getBrowsed()
                self.forms = self.persister.getForms()
                self.uploads = self.persister.getUploads()
                print _("File") + " " + crawlerFile + " " + _("loaded, the scan continues") + ":"
                if self.verbose == 2:
                    print " * " + _("URLs to browse")
                    for x in self.tobrowse:
                        print "    + " + x
                    print " * " + _("URLs browsed")
                    for x in self.browsed.keys():
                        print "    + " + x
            else:
                print _("File") + " " + crawlerFile + " " + _("not found, WSP will scan again the web site")

        # While the URL list isn't empty, keep browsing.
        # If the user stops the scan with Ctrl+C, report the URLs found so far;
        # they are saved in an XML file so the scan can be resumed.
        try:
            while len(self.tobrowse) > 0:
                lien = self.tobrowse.pop(0)
                if lien not in self.browsed and lien not in self.excluded:
                    headers = self.browse(lien)
                    if headers != {}:
                        if "link_encoding" not in headers:
                            if lien in self.link_encoding:
                                headers["link_encoding"] = self.link_encoding[lien]
                        self.browsed[lien] = headers
                        if self.verbose == 1:
                            sys.stderr.write('.')
                        elif self.verbose == 2:
                            print lien
                if self.scope == self.SCOPE_PAGE:
                    self.tobrowse = []
            self.saveCrawlerData()
            print ""
            print " " + _("Notice") + " "
            print "========"
            print _(
                "This scan has been saved in the file") + " " + self.persister.CRAWLER_DATA_DIR + '/' + self.server + ".xml"
            print _("You can use it to perform attacks without scanning again the web site with the \"-k\" parameter")
        except KeyboardInterrupt:
            self.saveCrawlerData()
            print ""
            print " " + _("Notice") + " "
            print "========"
            print _(
                "Scan stopped, the data has been saved in the file") + " " + self.persister.CRAWLER_DATA_DIR + '/' + self.server + ".xml"
            print _("To continue this scan, you should launch WSP with the \"-i\" parameter")
            pass
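
For context, the crawl loop above boils down to a queue of URLs to browse, a dict of already-browsed URLs, and a save-on-interrupt step. The sketch below is a simplified, standalone illustration of that pattern; the URLs and the browse()/save_state() stubs are hypothetical and it does not use the project's persister class.

# Simplified illustration of the crawl/resume loop (hypothetical data;
# browse() and save_state() are stubs, not the real implementation).
import sys

tobrowse = ["http://example.com/", "http://example.com/page1"]
browsed = {}
excluded = []

def browse(url):
    # Stub standing in for the real HTTP request; returns fake headers.
    return {"content-type": "text/html"}

def save_state():
    # The real crawler serializes its state to an XML file here so the
    # scan can be resumed later (the "-i" / "-k" parameters).
    pass

try:
    while len(tobrowse) > 0:
        lien = tobrowse.pop(0)
        if lien not in browsed and lien not in excluded:
            headers = browse(lien)
            if headers != {}:
                browsed[lien] = headers
                sys.stderr.write('.')
    save_state()
    print ""
    print "Scan finished, %d URLs browsed" % len(browsed)
except KeyboardInterrupt:
    save_state()
    print ""
    print "Scan stopped, %d URLs browsed so far" % len(browsed)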