def test_latest_update():
    """Wappalyzer.latest(update=True) must match a manually downloaded technologies.json."""
    import os
    import tempfile
    # Get the latest technologies file from the upstream repository.
    latest_technologies_file = requests.get(
        'https://raw.githubusercontent.com/AliasIO/wappalyzer/master/src/technologies.json')
    # Write the content to a proper temporary file instead of a hard-coded,
    # world-shared /tmp path (portable, no clobbering between runs).
    with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as t_file:
        t_file.write(latest_technologies_file.text)
        tmp_name = t_file.name
    try:
        # Create one Wappalyzer from the file and one via update=True.
        wappalyzer1 = Wappalyzer.latest(technologies_file=tmp_name)
        wappalyzer2 = Wappalyzer.latest(update=True)
        assert wappalyzer1.technologies == wappalyzer2.technologies
        assert wappalyzer1.categories == wappalyzer2.categories
    finally:
        # Always remove the temp file, even when the assertions fail.
        os.unlink(tmp_name)
def analyze_url(host, parameters):
    """
    Analyzes an URL using wappalyzer and prints the results.

    :param host: hostname or full URL to analyze.
    :param parameters: dict with keys 'username', 'password', 'digest',
        'proxy' and optionally 'no_validate'.
    """
    auth = None
    url = host
    verify = True
    proxies = {}
    # Default to http:// when no scheme was supplied.
    if not urlparse.urlparse(url).scheme:
        url = 'http://{0}'.format(url)
    wappalyzer = Wappalyzer.latest()
    if parameters['username'] and parameters['password']:
        if parameters['digest']:
            auth = HTTPDigestAuth(parameters['username'], parameters['password'])
        else:
            auth = HTTPBasicAuth(parameters['username'], parameters['password'])
    if parameters['proxy']:
        proxies['http'] = parameters['proxy']
        proxies['https'] = parameters['proxy']
    # Bug fix: dict.has_key() was removed in Python 3 — use the `in` operator.
    if 'no_validate' in parameters:
        requests.packages.urllib3.disable_warnings()
        verify = False
    try:
        page = requests.get(url, auth=auth, proxies=proxies, verify=verify)
        if page.status_code == 200:
            webpage = WebPage(url, page.text, page.headers)
            print('[+] {0} {1}'.format(host, wappalyzer.analyze(webpage)))
        else:
            print('[-] Got HTTP status code {0} - cannot analyze that...'.
                  format(page.status_code))
    except requests.exceptions.ConnectionError as exception:
        print('[-] Connection error: {0}'.format(exception))
    sys.stdout.flush()
def wappalyzer(self, target, verbose=False):
    """
    All verified subdomains are scanned with Wappalyzer to find out
    the technology stack used in each of them.
    Once wappalyzer is run, it prints out all verified domains
    """
    print("\n" + self.Y + "[i] Verified and Analyzed Subdomains: \n")
    wappalyzer = Wappalyzer.latest()
    # Tech stack db which contains the tech stack of all the sub domains
    collection = self.dbname['tech_stack']
    collection.create_index('domain', unique=True)
    # NOTE(review): this reads the literal attribute `collection` on dbname,
    # not the 'tech_stack' collection obtained above — confirm intentional.
    count = self.dbname.collection.count()
    for url in self.verified_domains:
        try:
            # verify=False: skip TLS certificate validation for the probe.
            webpage = WebPage.new_from_url('http://' + url, verify=False)
            tech_stack = wappalyzer.analyze(webpage)
            if tech_stack and verbose:
                print(self.G + "[i] URL: " + url)
                print(self.B + "[i] Wappalyzer: " + str(list(tech_stack)) + "\n")
            # Push the above data to DB
            data = {"id": count+1, "domain": url, "time": datetime.now()}
            data["parent"] = target
            data['tech_stack'] = list(tech_stack)
            dataid = collection.insert(data)
            count += 1
        except Exception as e:
            # Best-effort scan: skip any subdomain that fails to fetch,
            # analyze, or insert (e.g. duplicate-key on the unique index).
            continue
    return
def extract_used_techs(url):
    """Return the technologies (with versions and categories) detected on *url*."""
    page = WebPage.new_from_url(url, verify=False)
    analyzer = Wappalyzer.latest()
    return analyzer.analyze_with_versions_and_categories(page)
def analyze(uplist):
    """
    Analyze every host in *uplist* with Wappalyzer.

    Bug fix: the original iterated an undefined global ``goodlist`` instead
    of the ``uplist`` parameter, and rebuilt the Wappalyzer instance on
    every loop iteration.

    :param uplist: iterable of hostnames (no scheme).
    :return: list of per-host technology/version/category dicts.
    """
    results = []
    wappalyzer = Wappalyzer.latest()  # build once, reuse for every host
    for host in uplist:
        webpage = WebPage.new_from_url('https://' + host)
        data = wappalyzer.analyze_with_versions_and_categories(webpage)
        results.append(data)
    return results
def main():
    """Detect and print the technologies used by the target site."""
    wap = Wappalyzer.latest()
    try:
        web = WebPage.new_from_url("https://www.botlabco.ga/")
        tecnologias = wap.analyze(web)
        for t in tecnologias:
            print("Tecnologia detectada: {}".format(t))
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print("Ha ocurrido un error")
def run(self): parser = argparse.ArgumentParser() parser.add_argument('-t', action = "store", #stored dest = "target", help = "for example: ./wp.py -t site.com") args = parser.parse_args() self.site = args.target if self.site is None: sys.exit('Url is empty') if self.site: print '\n--------------------------------------------' print("# Determining target {}".format(self.site)) print '--------------------------------------------' try: wappalyzer = Wappalyzer.latest() if self.site.startswith('http://') == False: self.site = ''.join(('http://',self.site)) webpage = WebPage.new_from_url(self.site) analyze = wappalyzer.analyze(webpage) for components in analyze: print("> {}".format(components)) print '\n--------------------------------------------' except requests.exceptions.Timeout: print("Warning: warning website is unreachable") print '--------------------------------------------' except requests.exceptions.ConnectionError: print 'Name or service not known' print '--------------------------------------------' except KeyboardInterrupt: print("Why man ?")
def main():
    """Detect and print the technologies used by example.com."""
    wap = Wappalyzer.latest()
    try:
        web = WebPage.new_from_url("https://www.example.com")
        tec = wap.analyze(web)
        for t in tec:
            print("Detectada: {}".format(t))
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C is not swallowed.
        print("Error 404")
def test_latest(self):
    """Wappalyzer.latest() should expose the bundled category/app data."""
    analyzer = Wappalyzer.latest()
    print(analyzer.categories)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(analyzer.categories['1'], {
        'name': 'CMS',
        'priority': 1
    })
    self.assertIn('Apache', analyzer.apps)
def run_wappalyze(self, domain):
    """Fingerprint *domain* with Wappalyzer and log each detected technology."""
    analyzer = Wappalyzer.latest()
    page = WebPage.new_from_url(domain)
    detected = analyzer.analyze(page)
    if not detected:
        log.console_log("Result Not Found")
        return
    for technology in detected:
        log.console_log(technology)
def main():
    """Detect and print the technologies used by the target site."""
    wap = Wappalyzer.latest()
    try:
        web = WebPage.new_from_url("https://200code.tech")
        tecnologias = wap.analyze(web)
        for t in tecnologias:
            print("Tecnologia detectada: {}".format(t))
    except Exception:
        # Narrowed from a bare `except:`; also removed dead commented-out URL.
        print("Ha ocurrido un error")
def test_latest_update(tmp_path: Path):
    """A Wappalyzer built from a freshly downloaded technologies.json must equal update=True."""
    # Download the most recent technologies database from upstream.
    response = requests.get(
        'https://raw.githubusercontent.com/AliasIO/wappalyzer/master/src/technologies.json'
    )
    target_file = tmp_path.joinpath('technologies.json')
    # Persist the payload so it can be fed back in via technologies_file=.
    with target_file.open('w', encoding='utf-8') as handle:
        handle.write(response.text)
    # Build one instance from the file and one via the update path.
    from_file = Wappalyzer.latest(technologies_file=str(target_file))
    from_update = Wappalyzer.latest(update=True)
    assert from_file.technologies == from_update.technologies
    assert from_file.categories == from_update.categories
def wappalyzeit(domain): wappalyzer = Wappalyzer.latest() webpage = WebPage.new_from_url(domain) set1 = wappalyzer.analyze(webpage) if set1: print "[+] Third party libraries in Use:" for s in set1: print s else: print "\t\t\t[-] Nothing found. Make sure domain name is passed properly"
def main():
    """Detect and print the technologies used by the target site."""
    wap = Wappalyzer.latest()
    try:
        web = WebPage.new_from_url(
            "https://curso--python-0-pruebas.000webhostapp.com/")
        tecnologias = wap.analyze(web)
        for t in tecnologias:
            print("Tecnologia detectada: {}".format(t))
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C is not swallowed.
        print("Ha ocurrido un error")
def main():
    """Detect and print the technologies used by example.com."""
    wap = Wappalyzer.latest()
    try:
        web = WebPage.new_from_url('https://www.example.com')
        tecnologias = wap.analyze(web)  # iterable of detected technology names
        for t in tecnologias:
            print(f'Tecnología detectada: {t}')
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C is not swallowed.
        print('Ha ocurrido un error!')
def main():
    """When the page flag was supplied, analyze URL and print each technology."""
    if parser.page:
        wap = Wappalyzer.latest()
        try:
            web = WebPage.new_from_url(URL)
            tecnologias = wap.analyze(web)
            for t in tecnologias:
                print('Tecnologia detectada: {}'.format(t))
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C is not swallowed.
            print('Ha ocurrido un error')
def __init__(self, verbose=False, wappalyzerpath=None, wappalyzerargs=None,
             python=False):
    """
    Select a Wappalyzer backend: an explicit CLI command, the `wappalyzer`
    binary on PATH, the wappalyzer/cli docker image, or fall back to the
    python-Wappalyzer package.

    :param verbose: stored on the instance for later use.
    :param wappalyzerpath: explicit command line for the Wappalyzer CLI.
    :param wappalyzerargs: extra CLI arguments (unused in python mode).
    :param python: force the python-Wappalyzer backend.
    """
    if not wappalyzerpath:
        if shutil.which("wappalyzer"):
            # Bug fix: the original only rebound the *local* variable here
            # and never set the attribute, so self.wappalyzerpath was
            # undefined (AttributeError) when the CLI was found on PATH.
            self.wappalyzerpath = ['wappalyzer']
        elif shutil.which("docker"):
            # Test if docker image is installed
            o = subprocess.run(args=['docker', 'image', 'ls'],
                               stdout=subprocess.PIPE)
            if 'wappalyzer/cli' not in o.stdout.decode():
                self.wappalyzerpath = None
            else:
                self.wappalyzerpath = [
                    'docker', 'run', '--rm', 'wappalyzer/cli'
                ]
        else:
            self.wappalyzerpath = None
    else:
        self.wappalyzerpath = shlex.split(wappalyzerpath)
    if not self.wappalyzerpath:
        # No CLI available: use the python implementation.
        self.wappalyzerargs = None
        self.python = True
    elif python:
        self.python = True
    else:
        self.wappalyzerargs = shlex.split(
            wappalyzerargs) if wappalyzerargs else []
        self.python = False
    self.verbose = verbose
    if self.python:
        print("Using python-Wappalyzer")
        try:
            from Wappalyzer import Wappalyzer, WebPage
            self.webpage = WebPage.new_from_url
            self.wappalyzer = Wappalyzer.latest()
        except ImportError:
            print("Please install python-Wappalyzer")
            exit(1)
    else:
        print("Using Wappalyzer CLI: {}".format(' '.join(
            self.wappalyzerpath)))
    self.results = []
def useWappalyzer(url):
    """
    Fingerprint *url* with Wappalyzer.

    :param url: full URL to analyze.
    :return: list of detected technologies (empty list when nothing was
        found), or None on error (the exception is printed).
    """
    try:
        wappalyzer = Wappalyzer.latest()
        webpage = WebPage.new_from_url(url)
        webprints = wappalyzer.analyze(webpage)
        # Bug fix: the original returned {} (a dict) when nothing was
        # detected but a list otherwise — keep the return type consistent.
        return list(webprints)
    except Exception as e:
        print(e)
def wappalyzeit(domain): print colored(style.BOLD + '---> Wapplyzing web page:\n' + style.END, 'blue') time.sleep(0.3) wappalyzer = Wappalyzer.latest() webpage = WebPage.new_from_url(domain) set1 = wappalyzer.analyze(webpage) if set1: print "[+] Third party libraries in Use:" for s in set1: print s else: print "\t\t\t[-] Nothing found. Make sure domain name is passed properly"
def wappalyzer_detection():  # pretty print the output (set; need to change to dict)
    """Flask view: fingerprint the submitted `target` host and render the results."""
    target = ""
    if request.form.get('target'):
        target = request.form.get('target')
        # Probe the host over plain HTTP first.
        req = requests.get('http://' + target)
        if req.status_code == 200:
            wappalyzer = Wappalyzer.latest()
            # NOTE(review): the probe used http:// but the analysis fetches
            # https:// — confirm this scheme mismatch is intentional.
            webpage = WebPage.new_from_url('https://' + target)
            output = wappalyzer.analyze(webpage)
            return render_template('detection.html', target=output)
        else:
            # Fall back to HTTPS; note the status code is not checked here.
            req = requests.get('https://' + target)
            wappalyzer = Wappalyzer.latest()
            webpage = WebPage.new_from_url('https://' + target)
            output = wappalyzer.analyze(webpage)
            return render_template('detection.html', target=output)
    else:
        # No target submitted: render the empty form.
        return render_template('detection.html')
def main():
    """Analyze parser.target with Wappalyzer and print each detected technology."""
    if parser.target:
        wap = Wappalyzer.latest()
        try:
            web = WebPage.new_from_url(parser.target)
            tecnologias = wap.analyze(web)
            for tecnologia in tecnologias:
                print("Tecnología detectada: {}".format(tecnologia))
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C is not swallowed.
            print("Ha ocurrido un error")
    else:
        print("Imposible analizar el objetivo")
def services(subdomain):
    """
    Detect web technologies for each subdomain.

    :param subdomain: sequence whose first element is an iterable of
        subdomain names (celery will feed result from subdomains scan here).
    :return: list of per-subdomain technology lists (or an error placeholder).
    """
    found_services = []
    # Build the Wappalyzer instance once instead of once per subdomain.
    wappalyzer = Wappalyzer.latest()
    for subDomain in subdomain[0]:
        try:
            webpage = WebPage.new_from_url('http://' + subDomain)
            found_services.append(list(wappalyzer.analyze(webpage)))
        except Exception:
            # Narrowed from a bare `except:`; keep the best-effort placeholder.
            error_array = ['No Service Detected - Error']
            found_services.append(error_array)
    return found_services
def wappalyzeit(domain):
    """Return the technologies detected on *domain*, each tab-prefixed, as a list."""
    time.sleep(0.3)
    analyzer = Wappalyzer.latest()
    page = WebPage.new_from_url(domain)
    detected = analyzer.analyze(page)
    # An empty detection set simply yields an empty list — both branches of
    # the original returned the same accumulator.
    return ["\t%s" % tech for tech in detected]
def main():
    """Print the technologies and categories Wappalyzer detects on the target."""
    wap = Wappalyzer.latest()
    try:
        web = WebPage.new_from_url("https://prod.senasica.gob.mx/sisia/login"
                                   )  # URL to scan
        tecno = wap.analyze(web)
        Categorias = wap.analyze_with_categories(web)
        for t in tecno:
            print("Tecnologias Detectadas son: {}".format(t))
        for c in Categorias:
            print("Categorias Detectadas: {}".format(c))
    except Exception:
        # Narrowed from a bare `except:`; also fixed the 'ocurriod' typo
        # in the error message.
        print("Ha ocurrido un error")
def main():
    """Analyze parser.address with Wappalyzer and print each detected technology."""
    wap = Wappalyzer.latest()
    try:
        if parser.address:
            web = WebPage.new_from_url(parser.address)  # web page to scan
            tecnologias = wap.analyze(web)
            # Print each detected technology on its own line.
            for t in tecnologias:
                cprint('Tecnología detectada:', 'yellow', end=' ')
                print(t)
        else:
            cprint('\nNecesito una dirección web', 'red', end='\n\n')
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C is not swallowed.
        cprint('\nHa ocurrido un error', 'red', end='\n\n')
def Tech_used(host_name):
    """Return a newline-separated report of 'category : technology' pairs for *host_name*."""
    output = "\n"
    try:
        page = WebPage.new_from_url(f'http://{host_name}')
        analyzer = Wappalyzer.latest()
        details = analyzer.analyze_with_categories(page)
        # One report line per (category, technology) pair.
        for tech in details:
            for category in details[tech]['categories']:
                output += f"{category} : {tech}\n"
    except Exception as e:
        output += "Sorry, The website SSL certificate could not be verified!\n"
    return output
#Have to install package python-Wappalyzer
def wappalyzeit(domain): temp_list = [] time.sleep(0.3) wappalyzer = Wappalyzer.latest() webpage = WebPage.new_from_url(domain) set1 = wappalyzer.analyze(webpage) if set1: print "[+] Third party libraries in Use:" for s in set1: temp_list.append("\t%s" % s) print "\t%s" % s return temp_list else: print "\t\t\t[-] Nothing found. Make sure domain name is passed properly" return temp_list
def __init__(self, verbose=False, wappalyzerpath=None, wappalyzerargs=None,
             python=False):
    """
    Select a Wappalyzer backend: an explicit CLI command, the `wappalyzer`
    binary on PATH, the wappalyzer/cli docker image, or python-Wappalyzer
    initialized from a freshly downloaded technologies.json.

    :param verbose: stored on the instance for later use.
    :param wappalyzerpath: explicit command line for the Wappalyzer CLI.
    :param wappalyzerargs: extra CLI arguments (unused in python mode).
    :param python: force the python-Wappalyzer backend.
    """
    if not wappalyzerpath:
        if shutil.which("wappalyzer"):
            # Bug fix: the original only rebound the *local* variable here,
            # so self.wappalyzerpath was undefined (AttributeError) when
            # the CLI was found on PATH.
            self.wappalyzerpath = ['wappalyzer']
        elif shutil.which("docker"):
            # Test if docker image is installed
            o = subprocess.run(args=['docker', 'image', 'ls'],
                               stdout=subprocess.PIPE)
            if 'wappalyzer/cli' not in o.stdout.decode():
                self.wappalyzerpath = None
            else:
                self.wappalyzerpath = ['docker', 'run', '--rm', 'wappalyzer/cli']
        else:
            self.wappalyzerpath = None
    else:
        self.wappalyzerpath = shlex.split(wappalyzerpath)
    if not self.wappalyzerpath:
        # No CLI available: use the python implementation.
        self.wappalyzerargs = None
        self.python = True
    elif python:
        self.python = True
    else:
        self.wappalyzerargs = shlex.split(wappalyzerargs) if wappalyzerargs else []
        self.python = False
    self.verbose = verbose
    if self.python:
        print("Using python-Wappalyzer")
        try:
            from Wappalyzer import Wappalyzer, WebPage
            self.webpage = WebPage.new_from_url
            # Refresh the technologies database from upstream before use.
            lastest_technologies_file = requests.get('https://raw.githubusercontent.com/AliasIO/wappalyzer/master/src/technologies.json')
            with open('/tmp/lastest_technologies_file.json', 'w') as t_file:
                t_file.write(lastest_technologies_file.text)
            self.wappalyzer = Wappalyzer.latest(technologies_file='/tmp/lastest_technologies_file.json')
        except ImportError:
            print("Please install python-Wappalyzer")
            exit(1)
    else:
        print("Using Wappalyzer CLI: {}".format(' '.join(self.wappalyzerpath)))
    self.results = []
def test_latest(self):
    """latest() must parse the apps.json served by the mocked endpoint."""
    HTTPretty.register_uri(HTTPretty.GET,
                           'https://raw.github.com/ElbertF/Wappalyzer/master/share/apps.json',
                           body="""
        {
            "categories": {
                "foo": "bar"
            },
            "apps": {
                "blee": {}
            }
        }
        """)
    analyzer = Wappalyzer.latest()
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(analyzer.categories['foo'], 'bar')
    self.assertIn('blee', analyzer.apps)
def wappalyzeit(domain, taskId): try: wappalyzer = Wappalyzer.latest() odomain = "http://%s" % domain webpage = WebPage.new_from_url(odomain) set1 = wappalyzer.analyze(webpage) wap = [] if set1: print "[+] Third party libraries in Use:" for s in set1: wap.append(s) else: print "\t\t\t[-] Nothing found. Make sure domain name is passed properly" save_record(domain, taskId, "WapAlyzer", wap) return wap except: return []
def get_alyzer_res(self, level, url: str): """ 这里去获取结果 :param level: :param url: :return: """ # 为url添加头 target = url if not (url.startswith("https://") or url.startswith("http://")): target = "http://" + url # -w是ms单位,即超过那个时间后就不再继续搞了 try: # 只初始化一个wappalyzer wappalyzer = Wappalyzer.latest() webpage = WebPage.new_from_url(target, verify=False) info = wappalyzer.analyze_with_versions_and_categories(webpage) if not isinstance(info, dict) or info.__len__() <= 0: return for k, v in info.items(): name = k versions = v.get("versions", []) categories = v.get("categories", []) if name is None or name == "": continue for i in range(len(categories)): ctname = categories[i] if ctname.lower() == "cms": self._logger.debug("Start CMS ver detection: {}".format(target)) ver = self._recognize_cms_ver(target, name) if ver is not None: version = ver self._logger.debug( "Got cms version: {}:{}".format(name, version) ) com = Component(self.task, level, name) com.category = ctname com.url = target if len(versions) >= i + 1: com.ver = versions[i] yield com except Exception as errs: self._logger.error(f"Wappaylyzer found nothing\nerr:{errs}")
def test_latest(self):
    """latest() must parse the apps.json served by the mocked endpoint."""
    HTTPretty.register_uri(
        HTTPretty.GET,
        'https://raw.github.com/ElbertF/Wappalyzer/master/share/apps.json',
        body="""
        {
            "categories": {
                "foo": "bar"
            },
            "apps": {
                "blee": {}
            }
        }
        """)
    analyzer = Wappalyzer.latest()
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(analyzer.categories['foo'], 'bar')
    self.assertIn('blee', analyzer.apps)
def wappalyze(rid):
    """
    Run Wappalyzer over the stored Resource with pk *rid* and attach a
    Webapp record for every detected technology.

    :param rid: primary key of the Resource row to analyze.
    :return: rid (always, even when analysis was skipped or failed).
    """
    r = None
    url = None
    content = None
    headers = {}
    try:
        r = Resource.objects.get(pk=rid)
        if r.url:
            url = r.url.url
        if r.content:
            content = r.content.content
        if r.headers:
            # Headers are stored as a Python-literal string; parse it safely.
            h = ast.literal_eval(r.headers)
            for k,v in h.items():
                headers[k] = v
    except Exception as e:
        logger.error(str(e))
        #return None
    # Only analyze when url, content and headers were all loaded.
    if url and content and headers:
        try:
            wappalyzer = Wappalyzer.latest()
            webpage = WebPage(
                url=url,
                html=content,
                headers=headers,
            )
            apps = wappalyzer.analyze(webpage)
            logger.debug(apps)
            for a in apps:
                # get_or_create avoids duplicate Webapp rows per technology.
                webapp, created = Webapp.objects.get_or_create(
                    name = a,
                )
                r.webapp.add(webapp)
                r.save()
            #return r
        except Exception as e:
            logger.error(str(e))
    return rid
def test_latest(self):
    """latest() should expose the bundled categories and apps."""
    analyzer = Wappalyzer.latest()
    print(analyzer.categories)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(analyzer.categories['1'], 'CMS')
    self.assertIn('Apache', analyzer.apps)
__author__ = 'js'
from bs4 import BeautifulSoup
from urlparse import urlparse
import requests,socket,re
from Wappalyzer import Wappalyzer,WebPage
from multiprocessing import Pool,Manager,cpu_count

# Shared analyzer instance used by the worker functions.
wappalyzer = Wappalyzer.latest()


def getlist(file):
    """
    Read the target-list file and return the hostnames it contains.

    :param file: path to a file with one URL per line.
    :return: list of hostnames.
    """
    with open(file, "r") as handle:
        lines = handle.readlines()
    return [urlparse(entry).hostname.strip() for entry in lines]


def getIP(url):
    """
    Resolve a hostname to its IP address.

    :param url: hostname to resolve.
    :return: dotted-quad IP string.
    """
    return socket.gethostbyname(url)