def get_emails(self):
    """Clean the stored HTML and grep it for email addresses.

    Returns:
        The list produced by Parser.GrepFindEmails().
    """
    Parse = Parser.Parser(self.Html)
    Parse.genericClean()
    Parse.urlClean()
    FinalOutput = Parse.GrepFindEmails()
    # Parenthesized print works on Python 2 and 3; the original bare
    # `print FinalOutput` statement is a SyntaxError under Python 3.
    print(FinalOutput)
    return FinalOutput
def get_emails(self):
    """Extract email addresses from the captured HTML.

    Returns:
        Tuple of (email list, HTML-formatted results).
    """
    html_parser = Parser.Parser(self.Html)
    # Normalize the raw markup before grepping for addresses.
    html_parser.genericClean()
    html_parser.urlClean()
    emails = html_parser.GrepFindEmails()
    html_report = html_parser.BuildResults(emails, self.name)
    return emails, html_report
def get_emails(self):
    """Clean the collected result lines and build the HTML report.

    Result entries may carry trailing newlines; CleanListOutput is
    relied on to strip them.
    """
    cleaner = Parser.Parser(self.results)
    emails = cleaner.CleanListOutput()
    report = cleaner.BuildResults(emails, self.name)
    self.logger.debug('EmailHunter completed search')
    return emails, report
def get_emails(self):
    """Scrape email addresses out of the extracted PPTX text.

    Returns:
        Tuple of (email list, HTML-formatted results).
    """
    text_parser = Parser.Parser(self.Text)
    # Strip generic noise and URL fragments before the email grep.
    text_parser.genericClean()
    text_parser.urlClean()
    emails = text_parser.GrepFindEmails()
    html_report = text_parser.BuildResults(emails, self.name)
    self.logger.debug('ExaleadPPTXSearch completed search')
    return emails, html_report
def get_emails(self):
    """Pull email addresses from the gathered Reddit HTML.

    Returns:
        Tuple of (email list, HTML-formatted results).
    """
    scraper = Parser.Parser(self.Html)
    # Clean markup and URLs first so the grep only sees plain text.
    scraper.genericClean()
    scraper.urlClean()
    found = scraper.GrepFindEmails()
    rendered = scraper.BuildResults(found, self.name)
    self.logger.debug("RedditPostSearch completed search")
    return found, rendered
def get_emails(self):
    """Extract email addresses from the harvested text.

    Returns:
        Tuple of (email list, HTML-formatted results).
    """
    text_parser = Parser.Parser(self.Text)
    # Drop unicode artifacts first — the source text has caused
    # unicode-related grep failures before.
    text_parser.RemoveUnicode()
    text_parser.genericClean()
    text_parser.urlClean()
    emails = text_parser.GrepFindEmails()
    html_report = text_parser.BuildResults(emails, self.name)
    return emails, html_report
def get_emails(self):
    """Grep the paste text for emails and build HTML + JSON reports.

    Returns:
        Tuple of (email list, HTML results, JSON results).
    """
    paste_parser = Parser.Parser(self.Text)
    paste_parser.genericClean()
    paste_parser.urlClean()
    emails = paste_parser.GrepFindEmails()
    html_out = paste_parser.BuildResults(emails, self.name)
    json_out = paste_parser.BuildJson(emails, self.name)
    self.logger.debug("PasteBinSearch completed search")
    return emails, html_out, json_out
def get_emails(self):
    """Clean the stored HTML and grep it for email addresses.

    NOTE(review): parsing errors should be surfaced to the caller —
    data like ``user@host:Password`` has been observed, which defeats
    most email regexes.  TODO: report parse failures instead of
    silently returning partial results.
    """
    html_parser = Parser.Parser(self.Html)
    html_parser.genericClean()
    html_parser.urlClean()
    emails = html_parser.GrepFindEmails()
    return emails
def get_emails(self):
    """Grep the Whoisology results for emails; build HTML + JSON output.

    Returns:
        Tuple of (email list, HTML results, JSON results).
    """
    result_parser = Parser.Parser(self.results)
    # Normalize the raw results before running the email grep.
    result_parser.genericClean()
    result_parser.urlClean()
    emails = result_parser.GrepFindEmails()
    html_out = result_parser.BuildResults(emails, self.name)
    json_out = result_parser.BuildJson(emails, self.name)
    self.logger.debug('Whoisology completed search')
    return emails, html_out, json_out
def get_emails(self):
    """Clean the paste HTML and grep it for email addresses.

    NOTE(review): parsing errors should be reported back to the
    caller — input such as ``user@host:Password`` has been seen and
    breaks most email regexes.

    Returns:
        Tuple of (email list, HTML-formatted results).
    """
    paste_parser = Parser.Parser(self.Html)
    paste_parser.genericClean()
    paste_parser.urlClean()
    emails = paste_parser.GrepFindEmails()
    html_report = paste_parser.BuildResults(emails, self.name)
    self.logger.debug('CanaryBinSearch completed search')
    return emails, html_report
def test_paser():
    # Exercise the Parser clean/extract pipeline against a fixed blob of
    # messy markup-ish test data.  (Name typo "paser" kept — renaming
    # would change the public test name.)
    # NOTE(review): the original line breaks inside this triple-quoted
    # fixture were lost in formatting; content preserved as recovered.
    raw = """ alex // test //...dfdfsf data !@#$%^%&^&*() <em>[email protected]</em> <em> [email protected] </em> <tr>[email protected]</tr> <></><><><><><> """
    p = Parser.Parser(raw)
    p.RemoveUnicode()
    # NOTE(review): return values are captured but never asserted on —
    # this "test" only checks that the pipeline runs without raising.
    finaloutput, htmlresults = p.extendedclean('test')
def get_emails(self):
    """Grep the wget mirror directory for email addresses.

    Recursively greps the directory created by wget for '@', pipes that
    through a second grep matching email-shaped tokens, and collects the
    resulting lines.  Optionally removes the mirrored directory when
    ``self.remove`` requests it and the wget run succeeded.

    Returns:
        Tuple of (email list, HTML results, JSON results).
    """
    FinalOutput = []
    # wget was invoked with --directory-prefix=<dir>; recover <dir>.
    directory = self.save.replace("--directory-prefix=", "")
    try:
        # Only grep when the wget run succeeded (retVal == 0);
        # otherwise there is no directory worth scanning.
        if not self.retVal > 0:
            ps = subprocess.Popen(('grep', '-r', "@", directory),
                                  stdout=subprocess.PIPE)
            val = ""
            try:
                # Second grep narrows the '@' lines down to tokens that
                # look like email addresses.
                val = subprocess.check_output(
                    ("grep", "-i", "-o",
                     '[A-Z0-9._%+-]\\+@[A-Z0-9.-]\\+\\.[A-Z]\\{2,4\\}'),
                    stdin=ps.stdout)
            except Exception:
                # Best effort: grep exits non-zero on no matches.
                pass
            if val:
                # Decode pipe output (bytes on Py3) and split in memory —
                # replaces the original temp.txt round-trip, which also
                # broke on Py3 where str(bytes) embeds the b'...' repr.
                if isinstance(val, bytes):
                    val = val.decode('utf-8', 'ignore')
                FinalOutput = [line.rstrip("\n") for line in val.splitlines()]
    except Exception as e:
        # Parenthesized print works on both Python 2 and 3.
        print(e)
    if self.remove == "yes" or self.remove == "Yes":
        if not self.retVal > 0:
            # Single guarded removal — the original called rmtree twice,
            # the first call unguarded (it would raise on any failure).
            try:
                shutil.rmtree(directory)
            except OSError:
                # Removal is best effort; the directory may be gone.
                pass
    Parse = Parser.Parser(FinalOutput)
    HtmlResults = Parse.BuildResults(FinalOutput, self.name)
    JsonResults = Parse.BuildJson(FinalOutput, self.name)
    return FinalOutput, HtmlResults, JsonResults
def get_emails(self):
    """Grep the raw results for emails and build the HTML report.

    Returns:
        Tuple of (email list, HTML-formatted results).
    """
    result_parser = Parser.Parser(self.results)
    emails = result_parser.GrepFindEmails()
    html_report = result_parser.BuildResults(emails, self.name)
    return emails, html_report
def get_emails(self):
    """Clean the collected results and grep them for email addresses."""
    result_parser = Parser.Parser(self.results)
    # Strip generic noise and URL fragments before the email grep.
    result_parser.genericClean()
    result_parser.urlClean()
    emails = result_parser.GrepFindEmails()
    return emails
def get_emails(self):
    """Grep the raw results for email addresses and return the list."""
    result_parser = Parser.Parser(self.results)
    return result_parser.GrepFindEmails()
def get_emails(self):
    """Clean the result lines and build the HTML report.

    Entries may carry trailing newlines; CleanListOutput is relied on
    to strip them.

    Returns:
        Tuple of (email list, HTML-formatted results).
    """
    cleaner = Parser.Parser(self.results)
    emails = cleaner.CleanListOutput()
    html_report = cleaner.BuildResults(emails, self.name)
    return emails, html_report
def get_emails(self):
    """Return the cleaned result list.

    Entries may carry trailing newlines; CleanListOutput is relied on
    to strip them.
    """
    cleaner = Parser.Parser(self.results)
    return cleaner.CleanListOutput()
def get_emails(self):
    """Grep the PGP search results for emails and build the HTML report.

    Returns:
        Tuple of (email list, HTML-formatted results).
    """
    pgp_parser = Parser.Parser(self.results)
    emails = pgp_parser.GrepFindEmails()
    html_report = pgp_parser.BuildResults(emails, self.name)
    self.logger.debug("SearchPGP completed search")
    return emails, html_report
def get_emails(self):
    """Run the combined clean-and-extract pass over the captured HTML.

    Returns:
        Tuple of (email list, HTML-formatted results) as produced by
        Parser.extendedclean().
    """
    html_parser = Parser.Parser(self.Html)
    emails, html_report = html_parser.extendedclean(self.name)
    self.logger.debug('AskSearch completed search')
    return emails, html_report