from Scraping import Scraping

# Standalone entry point: scrape a single hard-coded site for
# images/PDFs and run the BeautifulSoup-based scrape over it.
# NOTE(review): URL is hard-coded; consider taking it from argv.
if __name__ == "__main__":
    url = 'http://t3chfest.uc3m.es'
    scraping = Scraping()
    # Download/collect images and PDF links found at the URL.
    scraping.scrapingImagesPdf(url)
    # Parse the page with BeautifulSoup (behavior defined in Scraping module).
    scraping.scrapingBeautifulSoup(url)
infoLocation.printRecord(hostname) infoLocation.geoInfo(hostname, ip) if option == "10": f = open('logScraping.txt', 'a') sys.stdout = Log(sys.stdout, f) print '\njpg images' print '--------------' scraping.getImgFromUrl(hostname, 'jpg') print '\npng images' print '--------------' scraping.getImgFromUrl(hostname, 'png') print '\ngif images' print '--------------' scraping.getImgFromUrl(hostname, 'gif') scraping.scrapingImagesPdf(hostname) scraping.scrapingImagesPdf(ip) scraping.scrapingBeautifulSoup(hostname) scraping.scrapingBeautifulSoup(ip) if option == "11": f = open('logCheckHeaders.txt', 'a') sys.stdout = Log(sys.stdout, f) checker.checkHeadersInfoByIp(ip) checker.checkHeadersInfoByHostName(hostname) if option == "12": f = open('logSSHBruteForce.txt', 'a') sys.stdout = Log(sys.stdout, f) sshConnection.SSHBruteForce(hostname) if option == "13": f = open('logFTP.txt', 'a')
sys.stdout = Log(sys.stdout, f) infoLocation.geoInfo(hostname,ip) if option == "10": f = open('logScraping.txt', 'a') sys.stdout = Log(sys.stdout, f) print '\njpg images' print '--------------' scraping.getImgFromUrl(hostname, 'jpg') print '\npng images' print '--------------' scraping.getImgFromUrl(hostname, 'png') print '\ngif images' print '--------------' scraping.getImgFromUrl(hostname, 'gif') scraping.scrapingImagesPdf(hostname) scraping.scrapingImagesPdf(ip) scraping.scrapingBeautifulSoup(hostname) scraping.scrapingBeautifulSoup(ip) if option == "11": f = open('logCheckHeaders.txt', 'a') sys.stdout = Log(sys.stdout, f) checker.checkHeadersInfoByIp(ip) checker.checkHeadersInfoByHostName(hostname) if option == "12": f = open('logSSHBruteForce.txt', 'a') sys.stdout = Log(sys.stdout, f) sshConnection.SSHBruteForce(hostname) if option == "13": f = open('logFTP.txt', 'a')