def test_new_from_url():
    HTTPretty.register_uri(HTTPretty.GET, 'http://example.com/', body='snerble')
    webpage = WebPage.new_from_url('http://example.com/')
    assert webpage.html == 'snerble'
def test_get_analyze_with_categories():
    webpage = WebPage('http://example.com', '<html>aaa</html>', {})
    categories = {
        "1": {"name": "cat1", "priority": 1},
        "2": {"name": "cat2", "priority": 1}
    }
    technologies = {
        'a': {
            'html': 'aaa',
            'cats': [1],
        },
        'b': {
            'html': 'bbb',
            'cats': [1, 2],
        }
    }
    analyzer = Wappalyzer(categories=categories, technologies=technologies)
    result = analyzer.analyze_with_categories(webpage)
    assert result == {"a": {"categories": ["cat1"]}}
def test_analyze_no_apps(self):
    analyzer = Wappalyzer(categories={}, apps={})
    webpage = WebPage('http://example.com', '<html></html>', {})
    detected_apps = analyzer.analyze(webpage)
    self.assertEqual(detected_apps, set())
def test_analyze_no_technologies():
    analyzer = Wappalyzer(categories={}, technologies={})
    webpage = WebPage('http://example.com', '<html></html>', {})
    detected_technologies = analyzer.analyze(webpage)
    assert detected_technologies == set()
def test_new_from_url(self):
    HTTPretty.register_uri(HTTPretty.GET, 'http://example.com/', body='snerble')
    webpage = WebPage.new_from_url('http://example.com/')
    self.assertEqual(webpage.html, 'snerble')
def wappalyzer(self, target, verbose=False):
    """
    Scan every verified subdomain with Wappalyzer to find out the
    technology stack used by each of them. Once Wappalyzer has run,
    print the verified domains together with their detected stack.
    """
    print("\n" + self.Y + "[i] Verified and Analyzed Subdomains: \n")
    wappalyzer = Wappalyzer.latest()
    # Tech stack collection which holds the tech stack of all the subdomains
    collection = self.dbname['tech_stack']
    collection.create_index('domain', unique=True)
    count = collection.count()  # number of records already stored
    for url in self.verified_domains:
        try:
            webpage = WebPage.new_from_url('http://' + url, verify=False)
            tech_stack = wappalyzer.analyze(webpage)
            if tech_stack and verbose:
                print(self.G + "[i] URL: " + url)
                print(self.B + "[i] Wappalyzer: " + str(list(tech_stack)) + "\n")
            # Push the above data to the DB
            data = {"id": count + 1, "domain": url, "time": datetime.now()}
            data["parent"] = target
            data['tech_stack'] = list(tech_stack)
            dataid = collection.insert(data)
            count += 1
        except Exception as e:
            continue
    return
def run_wappalyze(self, domain):
    webpage = WebPage.new_from_url(domain)
    analyze_result = analyzer.analyze(webpage)
    if analyze_result:
        for result in analyze_result:
            log.console_log(result)
    else:
        log.console_log("Result Not Found")
def extract_used_techs(url):
    # webpage = urlopen(url, 60)
    # content = webpage.read().decode('utf-8')
    # headers = dict(webpage.getheaders())
    # webpage = WebPage(url, html=content, headers=headers)  # .new_from_url(url, verify=False)
    webpage = WebPage.new_from_url(url, verify=False)
    wappalyzer = Wappalyzer.latest()
    return wappalyzer.analyze_with_versions_and_categories(webpage)
def analyze(uplist):
    results = []
    for host in uplist:
        wappalyzer = Wappalyzer.latest()
        webpage = WebPage.new_from_url('https://' + host)
        data = wappalyzer.analyze_with_versions_and_categories(webpage)
        results.append(data)
    return results
def GetFinger(self): print("正在获取网站指纹及技术!") try: finger=WebPage(self.url, self.rep).info() return finger except Exception as e: print(e) return "Unknow"
def run(self):
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', action="store",
                        dest="target",
                        help="for example: ./wp.py -t site.com")
    args = parser.parse_args()
    self.site = args.target
    if self.site is None:
        sys.exit('Url is empty')
    if self.site:
        print('\n--------------------------------------------')
        print("# Determining target {}".format(self.site))
        print('--------------------------------------------')
        try:
            wappalyzer = Wappalyzer.latest()
            if not self.site.startswith('http://'):
                self.site = ''.join(('http://', self.site))
            webpage = WebPage.new_from_url(self.site)
            analyze = wappalyzer.analyze(webpage)
            for components in analyze:
                print("> {}".format(components))
            print('\n--------------------------------------------')
        except requests.exceptions.Timeout:
            print("Warning: website is unreachable")
            print('--------------------------------------------')
        except requests.exceptions.ConnectionError:
            print('Name or service not known')
            print('--------------------------------------------')
        except KeyboardInterrupt:
            print("Why man ?")
def main(): wap = Wappalyzer.latest() try: web = WebPage.new_from_url("https://www.example.com") tec = wap.analyze(web) for t in tec: print("Detectada: {}".format(t)) except: print("Error 404")
def GetFinger(self):
    redispool.append("runlog", "Getting the website fingerprint and technologies for {}!\n".format(self.url))
    print("Getting the website fingerprint and technologies!")
    try:
        finger = WebPage(self.url, self.rep).info()
        return finger
    except Exception as e:
        print(e)
        return "Unknown"
def main(): wap = Wappalyzer.latest() try: web = WebPage.new_from_url("https://www.botlabco.ga/") tecnologias = wap.analyze(web) for t in tecnologias: print("Tecnologia detectada: {}".format(t)) except: print("Ha ocurrido un error")
def test_pass_request_params():
    try:
        webpage = WebPage.new_from_url('http://example.com/', timeout=0.00001)
        assert False  # should have raised ConnectTimeout
    except requests.exceptions.ConnectTimeout:
        assert True
    except:
        assert False  # should have raised ConnectTimeout
def wappalyzeit(domain):
    wappalyzer = Wappalyzer.latest()
    webpage = WebPage.new_from_url(domain)
    set1 = wappalyzer.analyze(webpage)
    if set1:
        print("[+] Third party libraries in Use:")
        for s in set1:
            print(s)
    else:
        print("\t\t\t[-] Nothing found. Make sure domain name is passed properly")
def main():
    wap = Wappalyzer.latest()
    try:
        # web = WebPage.new_from_url("https://www.example.com")
        web = WebPage.new_from_url("https://200code.tech")
        tecnologias = wap.analyze(web)
        for t in tecnologias:
            print("Detected technology: {}".format(t))
    except:
        print("An error occurred")
def main():
    wap = Wappalyzer.latest()
    try:
        web = WebPage.new_from_url('https://www.example.com')
        tecnologias = wap.analyze(web)  # this is a list-like collection of results
        for t in tecnologias:
            print(f'Detected technology: {t}')
    except:
        print('An error occurred!')
def main(): wap = Wappalyzer.latest() try: web = WebPage.new_from_url( "https://curso--python-0-pruebas.000webhostapp.com/") tecnologias = wap.analyze(web) for t in tecnologias: print("Tecnologia detectada: {}".format(t)) except: print("Ha ocurrido un error")
def main():
    if parser.page:
        wap = Wappalyzer.latest()
        try:
            web = WebPage.new_from_url(URL)
            tecnologias = wap.analyze(web)
            for t in tecnologias:
                print('Detected technology: {}'.format(t))
        except:
            print('An error occurred')
def useWappalyzer(url):
    try:
        wappalyzer = Wappalyzer.latest()
        webpage = WebPage.new_from_url(url)
        webprints = wappalyzer.analyze(webpage)
        if len(webprints) > 0:
            return list(webprints)
        else:
            return []
    except Exception as e:
        print(e)
def wappalyzer_detection():
    # pretty print the output (set; need to change to dict)
    target = ""
    if request.form.get('target'):
        target = request.form.get('target')
        req = requests.get('http://' + target)
        if req.status_code == 200:
            wappalyzer = Wappalyzer.latest()
            webpage = WebPage.new_from_url('https://' + target)
            output = wappalyzer.analyze(webpage)
            return render_template('detection.html', target=output)
        else:
            req = requests.get('https://' + target)
            wappalyzer = Wappalyzer.latest()
            webpage = WebPage.new_from_url('https://' + target)
            output = wappalyzer.analyze(webpage)
            return render_template('detection.html', target=output)
    else:
        return render_template('detection.html')
def main():
    if parser.target:
        wap = Wappalyzer.latest()
        try:
            web = WebPage.new_from_url(parser.target)
            tecnologias = wap.analyze(web)
            for tecnologia in tecnologias:
                print("Detected technology: {}".format(tecnologia))
        except:
            print("An error occurred")
    else:
        print("Unable to analyze the target")
def check(ig, url):
    if not url.startswith('http'):
        url = 'http://' + url
    try:
        webpage = WebPage.new_from_url(url)
        tech = wappalyzer.analyze(webpage)
        print(yellow + "[+] " + str(url) + end + bold + " | " + green + ", ".join(tech) + end)
    except Exception as e:
        if ig == 'True':
            pass
        else:
            print(red + "Error: " + end + "[ " + bold + str(url) + end + " ] | " + str(e))
def services(subdomain):
    found_services = []
    # celery will feed the result from the subdomains scan to this.
    for subDomain in subdomain[0]:
        wappalyzer = Wappalyzer.latest()
        try:
            webpage = WebPage.new_from_url('http://' + subDomain)
            found_services.append(list(wappalyzer.analyze(webpage)))
        except:
            error_array = ['No Service Detected - Error']
            found_services.append(error_array)
    return found_services
def wappalyzeit(domain):
    print(colored(style.BOLD + '---> Wappalyzing web page:\n' + style.END, 'blue'))
    time.sleep(0.3)
    wappalyzer = Wappalyzer.latest()
    webpage = WebPage.new_from_url(domain)
    set1 = wappalyzer.analyze(webpage)
    if set1:
        print("[+] Third party libraries in Use:")
        for s in set1:
            print(s)
    else:
        print("\t\t\t[-] Nothing found. Make sure domain name is passed properly")
def wappalyzeit(domain):
    temp_list = []
    time.sleep(0.3)
    wappalyzer = Wappalyzer.latest()
    webpage = WebPage.new_from_url(domain)
    set1 = wappalyzer.analyze(webpage)
    if set1:
        for s in set1:
            temp_list.append("\t%s" % s)
        return temp_list
    else:
        return temp_list
def test_analyze_with_versions_and_categories_pattern_lists():
    webpage = WebPage(
        'http://wordpress-example.com',
        '<html><head><meta name="generator" content="WordPress 5.4.2"></head></html>',
        {})
    categories = {
        "1": {"name": "CMS", "priority": 1},
        "11": {"name": "Blog", "priority": 1}
    }
    technologies = {
        "WordPress": {
            "cats": [1, 11],
            "html": [],
            "icon": "WordPress.svg",
            "implies": ["PHP", "MySQL"],
            "meta": {
                "generator": [
                    "Whatever123",
                    "Whatever456",
                    "^WordPress ?([\\d.]+)?\\;version:\\1",
                    "Whatever"
                ]
            },
            "website": "https://wordpress.org"
        },
        'b': {
            'html': 'bbb',
            'cats': [1, 2],
        },
        "PHP": {"website": "http://php.net"},
        "MySQL": {"website": "http://mysql.com"},
    }
    analyzer = Wappalyzer(categories=categories, technologies=technologies)
    result = analyzer.analyze_with_versions_and_categories(webpage)
    assert ("WordPress", {
        "categories": ["CMS", "Blog"],
        "versions": ["5.4.2"]
    }) in result.items()
def main(): wap = Wappalyzer.latest() try: web = WebPage.new_from_url("https://prod.senasica.gob.mx/sisia/login" ) #Se pone la url a scanear tecno = wap.analyze(web) Categorias = wap.analyze_with_categories(web) for t in tecno: print("Tecnologias Detectadas son: {}".format(t)) for c in Categorias: print("Categorias Detectadas: {}".format(c)) except: print("Ha ocurriod un error")
def main():
    wap = Wappalyzer.latest()
    try:
        if parser.address:
            web = WebPage.new_from_url(parser.address)  # URL of the site to scan
            tecnologias = wap.analyze(web)
            # Format the response as a list
            for t in tecnologias:
                cprint('Detected technology:', 'yellow', end=' ')
                print(t)
        else:
            cprint('\nI need a web address', 'red', end='\n\n')
    except:
        cprint('\nAn error occurred', 'red', end='\n\n')
def wappalyzeit(domain):
    temp_list = []
    time.sleep(0.3)
    wappalyzer = Wappalyzer.latest()
    webpage = WebPage.new_from_url(domain)
    set1 = wappalyzer.analyze(webpage)
    if set1:
        print("[+] Third party libraries in Use:")
        for s in set1:
            temp_list.append("\t%s" % s)
            print("\t%s" % s)
        return temp_list
    else:
        print("\t\t\t[-] Nothing found. Make sure domain name is passed properly")
        return temp_list
def wappalyzeit(domain, taskId):
    try:
        wappalyzer = Wappalyzer.latest()
        odomain = "http://%s" % domain
        webpage = WebPage.new_from_url(odomain)
        set1 = wappalyzer.analyze(webpage)
        wap = []
        if set1:
            print("[+] Third party libraries in Use:")
            for s in set1:
                wap.append(s)
        else:
            print("\t\t\t[-] Nothing found. Make sure domain name is passed properly")
        save_record(domain, taskId, "WapAlyzer", wap)
        return wap
    except:
        return []
import sys

from Wappalyzer import Wappalyzer, WebPage

url = sys.argv[1]
w = Wappalyzer.latest()
webpage = WebPage.new_from_url(url)
text = w.analyze(webpage)
print(text)
    apps = TextField()  # JSON str

    class Meta:
        database = db  # This model uses the "people.db" database.


counter = 0
total = Domain.select().count()
for domain in Domain.select().iterator():
    counter += 1
    if counter % 100 == 0:
        print(counter, '/', total)
    if len(json.loads(domain.apps)) > 0:
        continue
    try:
        webpage = WebPage.new_from_url('http://' + domain.name)
    except Exception as e:
        continue
    domain.apps = json.dumps(list(wappalyzer.analyze(webpage)))
    domain.save()

######################## create db
# db.create_tables([Domain])
# org_data = json.loads(open('../alexa.json').read())
# cats_data = {}
# for org_datum in org_data:
#     domain = org_datum[0]
#     cat = org_datum[1]
#     if not cat in cats_data:
def getCms(url):
    webpage = WebPage.new_from_url(url)
    return "&".join(wappalyzer.analyze(webpage))