def _prepare(self):
    """Build the list of candidate paths to enumerate.

    The filename list is resolved in priority order: an on-disk path
    file ('pathfile'), an explicit list ('pathlist'), the built-in
    'home' or 'web' common-file lists, or both built-ins combined.
    Each filename is then joined to every system user's home directory
    and the results are stored in self.args['paths'].

    Raises:
        ProbeException: if the path file cannot be read, or system
            users cannot be extracted via the 'users' support vector.
    """
    self._result = {}

    if self.args['pathfile']:
        # Fix: the original used a bare 'except:' and leaked the file
        # handle. Narrow to file-access errors and close via 'with'.
        try:
            with open(os.path.expanduser(self.args['pathfile']), 'r') as pathfile:
                filelist = pathfile.read().splitlines()
        except (IOError, OSError):
            raise ProbeException(self.name, "Error opening path list '%s'" % self.args['pathfile'])
    elif self.args['pathlist']:
        filelist = self.args['pathlist']
    elif self.args['auto_home']:
        filelist = self.common_files['home']
    elif self.args['auto_web']:
        filelist = self.common_files['web']
    else:
        filelist = self.common_files['web'] + self.common_files['home']

    result = self.support_vectors.get('users').execute()
    if not result:
        raise ProbeException(self.name, 'Cant extract system users')

    # One candidate per (user home, filename) pair, rooted at '/'.
    self.args['paths'] = [
        '/' + join_abs_paths([result[u].home, f])
        for u in result
        for f in filelist
    ]
def _prepare(self):
    """Collect candidate paths (every user home crossed with every filename) into self.args['paths']."""
    self._result = {}

    # Choose the filename source: explicit file, explicit list, or built-ins.
    if self.args['pathfile']:
        expanded = os.path.expanduser(self.args['pathfile'])
        try:
            candidates = open(expanded, 'r').read().splitlines()
        except:  # NOTE(review): bare except kept to preserve original behavior
            raise ProbeException(self.name, "Error opening path list '%s'" % self.args['pathfile'])
    elif self.args['pathlist']:
        candidates = self.args['pathlist']
    elif self.args['auto_home']:
        candidates = self.common_files['home']
    elif self.args['auto_web']:
        candidates = self.common_files['web']
    else:
        candidates = self.common_files['web'] + self.common_files['home']

    users = self.support_vectors.get('users').execute()
    if not users:
        raise ProbeException(self.name, 'Cant extract system users')

    paths = []
    for username in users:
        home = users[username].home
        for filename in candidates:
            paths.append('/' + join_abs_paths([home, filename]))
    self.args['paths'] = paths
# NOTE(review): fragment — the enclosing 'def _prepare(self):' header and the
# URL-validation condition guarding this first raise are outside this chunk.
raise ProbeException(self.name, '\'%s\': %s' % (self.args['baseurl'], WARN_NOT_URL))

url = self.args['url']
baseurl = self.args['baseurl']
rpath = self.args['rpath']

urls = []
try:
    crawler = Crawler(url, self.args['depth'], '', '')
    crawler.crawl()
except ModuleException:
    # Consistency fix: the sibling copies of this routine re-raise
    # module-level errors instead of wrapping them as crawler failures.
    raise
except Exception as e:
    raise ProbeException(self.name, "%s: %s" % (ERR_CRAWLER_EXCEPT, str(e)))
else:
    urls = set(crawler.visited_links.union(crawler.urls_seen))

# If no url, or the only one is the specified one
if not urls or (len(urls) == 1 and list(urls)[0] == url):
    raise ProbeException(self.name, WARN_CRAWLER_NO_URLS)

self.args['paths'] = []
for path in urls:
    # Map each crawled URL back under rpath, stripping the base URL prefix.
    self.args['paths'].append('/' + join_abs_paths([rpath, path[len(baseurl):]]))

def _probe(self):
    """Run the 'enum' support vector over the collected path list."""
    self._result = self.support_vectors.get('enum').execute({'pathlist': str(self.args['paths'])})
# NOTE(review): fragment — the enclosing method header and the condition
# guarding this first raise are outside this chunk.
raise ProbeException(self.name, "'%s': %s" % (self.args["baseurl"], WARN_NOT_URL))

url, baseurl, rpath = self.args["url"], self.args["baseurl"], self.args["rpath"]

urls = []
try:
    spider = Crawler(url, self.args["depth"], "", "")
    spider.crawl()
except ModuleException:
    # Let module-level errors propagate untouched.
    raise
except Exception as e:
    raise ProbeException(self.name, "%s: %s" % (ERR_CRAWLER_EXCEPT, str(e)))
else:
    urls = set(spider.visited_links.union(spider.urls_seen))

# Bail out when crawling produced nothing beyond the starting URL itself.
if not urls or (len(urls) == 1 and list(urls)[0] == url):
    raise ProbeException(self.name, WARN_CRAWLER_NO_URLS)

prefix_len = len(baseurl)
self.args["paths"] = ["/" + join_abs_paths([rpath, link[prefix_len:]]) for link in urls]

def _probe(self):
    """Delegate enumeration of the crawled paths to the 'enum' vector."""
    self._result = self.support_vectors.get("enum").execute({"pathlist": str(self.args["paths"])})
# NOTE(review): fragment — the method header and the 'url' assignment
# precede this chunk; 'url' is referenced from that earlier code.
baseurl = self.args['baseurl']
rpath = self.args['rpath']

urls = []
try:
    crawl_job = Crawler(url, self.args['depth'], '', '')
    crawl_job.crawl()
except ModuleException:
    # Module-level failures propagate as-is.
    raise
except Exception as e:
    raise ProbeException(self.name, "%s: %s" % (ERR_CRAWLER_EXCEPT, str(e)))
else:
    urls = set(crawl_job.visited_links.union(crawl_job.urls_seen))

# Abort when the crawl found nothing, or only the starting URL itself.
if not urls or (len(urls) == 1 and list(urls)[0] == url):
    raise ProbeException(self.name, WARN_CRAWLER_NO_URLS)

collected = []
for crawled in urls:
    # Re-root every crawled URL under rpath, minus the base URL prefix.
    collected.append('/' + join_abs_paths([rpath, crawled[len(baseurl):]]))
self.args['paths'] = collected

def _probe(self):
    """Execute the 'enum' support vector against the crawled path list."""
    self._result = self.support_vectors.get('enum').execute({'pathlist': str(self.args['paths'])})
# NOTE(review): fragment — the enclosing method header precedes this chunk.
url = self.args['url']
baseurl = self.args['baseurl']
rpath = self.args['rpath']

urls = []
try:
    web_crawler = Crawler(url, self.args['depth'], '', '')
    web_crawler.crawl()
except ModuleException:
    # Re-raise module-level errors without wrapping them.
    raise
except Exception as e:
    raise ProbeException(self.name, "%s: %s" % (ERR_CRAWLER_EXCEPT, str(e)))
else:
    urls = set(web_crawler.visited_links.union(web_crawler.urls_seen))

# If no url, or the only one is the specified one
if not urls or (len(urls) == 1 and list(urls)[0] == url):
    raise ProbeException(self.name, WARN_CRAWLER_NO_URLS)

base_len = len(baseurl)
self.args['paths'] = ['/' + join_abs_paths([rpath, link[base_len:]]) for link in urls]

def _probe(self):
    """Enumerate the crawled paths through the 'enum' support vector."""
    self._result = self.support_vectors.get('enum').execute({'pathlist': str(self.args['paths'])})