def get_reverse_from_resolvethem(name):
    """Look up an IPv4 address for *name* by scraping www.resolvethem.com.

    Args:
        name: the host name to resolve; passed to ``core.get_html_from_url``.

    Returns:
        The first IPv4-looking string found in the page's ``div.inner``
        element, or ``False`` when the page could not be fetched, the
        element is missing, or no address was found.
    """
    url = 'http://www.resolvethem.com/'
    html = core.get_html_from_url(url, name)
    if not html:
        return False
    parser = BeautifulSoup(html)
    data = parser.find('div', 'inner')
    if data is None:
        # Page layout changed or request was blocked; nothing to scrape.
        return False
    # Raw string fixes the invalid "\." escape in a non-raw literal.
    # NOTE(review): the pattern also accepts octets > 255 — acceptable
    # for a best-effort scrape of a known page format.
    ip = re.search(r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}", data.get_text())
    if ip:
        # The original re-validated ip.group(0) with re.match against the
        # very same pattern — always true, so the check was dropped.
        return ip.group(0)
    return False
def get_dork_from_exploit_db(value):
    """Fetch the dork text for GHDB entry *value* from exploit-db.com.

    Args:
        value: GHDB entry id; appended to the base URL as a string.

    Returns:
        The stripped dork text, ``" "`` (single space) when the entry
        exists but has empty text, or ``False`` on fetch/parse failure.
    """
    url = "https://www.exploit-db.com/ghdb/"
    html = core.get_html_from_url(url + str(value))
    if not html:
        msg = "exploit-db returned badly"
        logger.debug(msg)
        return False
    parser = BeautifulSoup(html.decode("utf-8"), "html.parser")
    table = parser.find("table", {"class": "category-list"})
    # `is None` identity check replaces the non-idiomatic `!= None`.
    if table is None:
        msg = "exploit-db returned error"
        logger.debug(msg)
        return False
    data = table.find("a").get_text().strip()
    # Callers apparently treat an empty result specially; preserve the
    # single-space sentinel for empty dork text.
    return data if data else " "
def get_dork_from_exploit_db(value):
    """Fetch the dork text for GHDB entry *value* from exploit-db.com.

    NOTE(review): this is a duplicate definition — an identical function
    appears earlier in this file, and this later one shadows it at import
    time. One of the two should be deleted; kept here to avoid breaking
    any direct reference.

    Args:
        value: GHDB entry id; appended to the base URL as a string.

    Returns:
        The stripped dork text, ``" "`` (single space) when the entry
        exists but has empty text, or ``False`` on fetch/parse failure.
    """
    url = 'https://www.exploit-db.com/ghdb/'
    html = core.get_html_from_url(url + str(value))
    if not html:
        msg = "exploit-db returned badly"
        logger.debug(msg)
        return False
    parser = BeautifulSoup(html.decode('utf-8'), 'html.parser')
    table = parser.find('table', {'class': 'category-list'})
    # `is None` identity check replaces the non-idiomatic `!= None`.
    if table is None:
        msg = "exploit-db returned error"
        logger.debug(msg)
        return False
    data = table.find('a').get_text().strip()
    # Preserve the single-space sentinel used for empty dork text.
    return data if data else " "
def get_from_who_is(value, type):
    """Scrape who.is for *value* and log every domain-data table found.

    Args:
        value: domain/host name to query.
        type: 0 = whois record, 1 = website information, 2 = DNS records.
              (Parameter name shadows the builtin ``type`` but is kept
              unchanged for keyword-argument compatibility with callers.)

    Raises:
        ValueError: if *type* is not 0, 1 or 2. (Previously this case
        crashed with an opaque UnboundLocalError on ``url``.)
    """
    urls = {
        0: 'http://who.is/whois/',
        1: 'http://who.is/website-information/',
        2: 'http://who.is/dns/',
    }
    if type not in urls:
        raise ValueError('type must be 0, 1 or 2, got %r' % (type,))
    rawdata = core.get_html_from_url(urls[type] + value)
    if not rawdata:
        logger.error('[-] Error: Invalid host given for extensive data')
        return
    parser = BeautifulSoup(rawdata)
    for block in parser.find_all('div', 'domain-data'):
        title = block.header.h5.get_text()
        table = block.table
        if not table:
            continue
        logger.info('-----' + title.strip() + '-----')
        for row in table.find_all('tr'):
            # Header cells are joined with a leading '-' per cell, data
            # cells with a leading ' '; each group is logged only when it
            # has content (original behavior). The accumulator no longer
            # clobbers the `value` parameter.
            text = ''
            for description in row.find_all('th'):
                cell = description.get_text().strip()
                if cell:
                    text = text + '-' + cell
            if text:
                logger.info(text)
            text = ''
            for data in row.find_all('td'):
                cell = data.get_text().strip()
                if cell:
                    text = text + ' ' + cell
            if text:
                logger.info(text)
def get_from_who_is(value, type):
    """Scrape who.is for *value* and log every domain-data table found.

    NOTE(review): duplicate definition — an identical function appears
    earlier in this file and is shadowed by this one at import time.
    One copy should be removed.

    Args:
        value: domain/host name to query.
        type: 0 = whois record, 1 = website information, 2 = DNS records.
              (Name shadows the builtin; kept for caller compatibility.)

    Raises:
        ValueError: if *type* is not 0, 1 or 2. (Previously this case
        crashed with an opaque UnboundLocalError on ``url``.)
    """
    base_urls = (
        'http://who.is/whois/',
        'http://who.is/website-information/',
        'http://who.is/dns/',
    )
    if type not in (0, 1, 2):
        raise ValueError('type must be 0, 1 or 2, got %r' % (type,))
    rawdata = core.get_html_from_url(base_urls[type] + value)
    if not rawdata:
        logger.error('[-] Error: Invalid host given for extensive data')
        return
    parser = BeautifulSoup(rawdata)

    def _log_cells(cells, sep):
        # Log the non-empty cell texts, each prefixed by `sep`, as one
        # line — matching the original's accumulator behavior, which
        # produced a leading separator before the first cell.
        parts = [c.get_text().strip() for c in cells]
        line = ''.join(sep + p for p in parts if p)
        if line:
            logger.info(line)

    for block in parser.find_all('div', 'domain-data'):
        title = block.header.h5.get_text()
        table = block.table
        if not table:
            continue
        logger.info('-----' + title.strip() + '-----')
        for row in table.find_all('tr'):
            _log_cells(row.find_all('th'), '-')  # header cells
            _log_cells(row.find_all('td'), ' ')  # data cells