Ejemplo n.º 1
0
def main():
    """Crawl the URL given on the command line and report statistics.

    Parses command-line options and takes the first positional argument
    as the start URL.  With --links, only the page's links are printed
    and the program exits.  Otherwise the site is crawled to the
    configured depth, progress and a summary line go to stderr, and if
    no links at all were found the URL is appended to crawled.txt so it
    is not retried.  Ctrl-C aborts quietly.
    """
    try:
        opts, args = parse_options()

        url = args[0]

        if opts.links:
            getLinks(url)
            # Py3-compatible spelling of the old `raise SystemExit, 0`.
            raise SystemExit(0)

        depth = int(opts.depth)

        print("Crawling %s (Max Depth: %d)" % (url, depth), file=sys.stderr)
        crawler = Crawler(url, depth)
        crawler.crawl()
        print("DONE", file=sys.stderr)

        startTime, countLinks, countFollowed = crawler.getStats()
        # duration() is assumed to return a 4-tuple matching %s+%s:%s:%s
        # (days+hh:mm:ss) — TODO confirm against its definition.
        print("Found %d links, following %d urls in %s+%s:%s:%s" % (
            (countLinks, countFollowed) + duration(time.time() - startTime)),
            file=sys.stderr)

        if countLinks == 0:
            # `with` guarantees the handle is closed even if the write
            # raises; the original open()/close() pair leaked on error.
            with open('%s/bin/appdata/crawled.txt' % (definepath,), 'w') as crawled:
                print(url, file=crawled)
    except KeyboardInterrupt:
        # Deliberate: a manual interrupt is a normal way to stop the crawl.
        pass
Ejemplo n.º 2
0
    def uptime(self, source, target, args):
        """Display current uptime and cpu usage

        Syntax: UPTIME
        """

        # Wall-clock seconds since the recorded start time.
        elapsed = time() - self.parent.data["time"]
        up = duration(elapsed)

        # CPU time consumed so far, and what fraction of real time that is.
        cpu_time = clock()
        cpu_pct = 100.0 * cpu_time / elapsed

        template = (
            "Uptime: {0:d}+{1:d}:{2:d}:{3:d} "
            "(CPU: {4:0.2f}s {5:0.2f}%)"
        )
        return template.format(*(up + (cpu_time, cpu_pct)))
Ejemplo n.º 3
0
def main():
    """Crawl the URL given on the command line and report statistics.

    Same flow as the other main() example in this file, but the original
    here mixed tabs and spaces (a TabError under Python 3); indentation
    is normalized to 4 spaces and the Python-2-only syntax is updated.
    Ctrl-C aborts quietly.
    """
    try:
        opts, args = parse_options()

        url = args[0]

        if opts.links:
            getLinks(url)
            # Py3-compatible spelling of the old `raise SystemExit, 0`.
            raise SystemExit(0)

        depth = int(opts.depth)

        print("Crawling %s (Max Depth: %d)" % (url, depth), file=sys.stderr)
        crawler = Crawler(url, depth)
        crawler.crawl()
        print("DONE", file=sys.stderr)

        startTime, countLinks, countFollowed = crawler.getStats()
        # duration() is assumed to return a 4-tuple matching %s+%s:%s:%s
        # (days+hh:mm:ss) — TODO confirm against its definition.
        print("Found %d links, following %d urls in %s+%s:%s:%s" % (
            (countLinks, countFollowed) + duration(time.time() - startTime)),
            file=sys.stderr)

        if countLinks == 0:
            # `with` guarantees the handle is closed even if the write
            # raises; the original open()/close() pair leaked on error.
            with open('%s/bin/appdata/crawled.txt' % (definepath,), 'w') as crawled:
                print(url, file=crawled)
    except KeyboardInterrupt:
        # Deliberate: a manual interrupt is a normal way to stop the crawl.
        pass