def ConfRead(): ret = 0 intLen = 0 FConf = fileio() FLOG = fileio() data = '' emailalerting = '' emailpassthrough = '' try: #Conf file hardcoded here with open('/opt/micromailer/micromailer.conf', 'r') as read_file: data = json.load(read_file) except: print '[x] Unable to read configuration file! Terminating...' FLOG.WriteLogFile(CON.logfile, '[x] Unable to read configuration file! Terminating...\n') return -1 CON.logfile = data['logfile'] CON.server = data['server'] CON.serverport = data['server_port'] emailpassthrough = data['emailpassthrough'] if (emailpassthrough == 'True'): CON.emailpassthrough = True CON.email = data['email'] CON.password = data['password'] if (CON.debug == True): print '[DEBUG] data: ', data print '[DEBUG] CON.logfile: ' + str(CON.logfile) print '[DEBUG] CON.server: ' + str(CON.server) print '[DEBUG] CON.serverport: ' + str(CON.serverport) print '[DEBUG] CON.emailpassthrough: ' + str(CON.emailpassthrough) print '[DEBUG] CON.email: ' + str(CON.email) print '[DEBUG] CON.password: '******'[x] Please enter a valid sender e-mail password in the micromailer.conf file. Terminating...' FLOG.WriteLogFile(CON.logfile, '[x] Please enter a valid sender e-mail password in the micromailer.conf file. Terminating...\n') print '' return -1 else: print '[*] E-mail passthrough is active, ignoring password...' FLOG.WriteLogFile(CON.logfile, '[*] E-mail passthrough is active, ignoring password...\n') print '[*] Finished configuration successfully.\n' FLOG.WriteLogFile(CON.logfile, '[*] Finished configuration successfully.\n') return 0
def NMapRead(self, filename, debug): FLog = fileio() FLog.ReadFile(filename) tmpport = '' for line in FLog.fileobject: if (line.find('ssl/http') != -1): intFromVal1 = line.find('/') if ((intFromVal1 != -1) and (intFromVal1 <7)): tmpport = line[0:intFromVal1] self.https_data.append(int(tmpport)) self.http_data.append(int(tmpport)) if (debug == True): print 'Port: ' + tmpport else: if (debug == True): print '' tmpport = '' else: if (line.find('http') != -1): intFromVal1 = line.find('/') if ((intFromVal1 != -1) and (intFromVal1 <7)): tmpport = line[0:intFromVal1] self.http_data.append(int(tmpport)) if (debug == True): print 'Port: ' + tmpport else: if (debug == True): print '' tmpport = '' return 0
def ConfRead():
    """Read /opt/cumulonimbus/cumulonimbus.conf (JSON) into the global CON
    object and open the append-mode log handle CON.FLOG.

    Returns 0 on success, -1 if the configuration cannot be read.
    """
    data = ''

    try:
        #Conf file hardcoded here
        with open('/opt/cumulonimbus/cumulonimbus.conf', 'r') as read_file:
            data = json.load(read_file)
    except Exception as e:
        print(e)
        print('[x] Unable to read configuration file Terminating...\n')
        return -1

    CON.logfile = data['logfile']
    CON.cloud_domains = data['cloud_domains']
    CON.file_types = data['file_types']
    CON.depth = data['depth']
    CON.process = data['process']
    CON.user_agent = data['user_agent']

    if (CON.debug == True):
        print('[DEBUG] data: ', data)
        print('[DEBUG] CON.logfile: ' + str(CON.logfile))
        print('[DEBUG] CON.cloud_domains: ' + str(CON.cloud_domains))
        print('[DEBUG] CON.file_types: ' + str(CON.file_types))
        print('[DEBUG] CON.depth: ' + str(CON.depth))
        print('[DEBUG] CON.process: ' + str(CON.process))
        print('[DEBUG] CON.user_agent: ' + str(CON.user_agent))

    if (CON.debug == True):
        print('[*] Finished configuration.')
        print('')

    # Get logging going... create the log file on first run, then append.
    # (Both branches of the original test opened in append mode, so the
    # open is hoisted out of the if/else.)
    if not (os.path.isfile(CON.logfile)):
        FLOG = open(CON.logfile, "w")
        FLOG.write('[*] Creating log file...\n')
        FLOG.close()
    CON.FLOG = open(CON.logfile, "a")

    print('[*] Executing cumulonimbus v0.1... ')
    CON.FLOG.write('[*] Executing cumulonimbus v0.1...\n')
    print('[*] Finished configuration successfully.\n')
    CON.FLOG.write('[*] Finished configuration successfully.\n')
    return 0
def POE(logdir, target, logging, debug): if (logging == True): LOG = logger() newlogentry = '' dig_output_data = '' output = logdir + 'Dig.txt' FI = fileio() print '\r\n[*] Running Dig against: ' + target.target subproc = subprocess.Popen('dig -t NS ' + target.target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for dig_data in subproc.stdout.readlines(): if (debug == True): print '[DEBUG]: ' + dig_data dig_output_data += dig_data try: FI.WriteLogFile(output, dig_output_data) print colored('[*] Dig data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold']) if (logging == True): newlogentry = 'Dig data has been generated to file here: <a href=\"' + output + '\"> Dig Output </a>' LOG.WriteLog(logdir, target.target, newlogentry) except: print colored('[x] Unable to write Dig data to file', 'red', attrs=['bold']) if (logging == True): newlogentry = 'Unable to write Dig data to file' LOG.WriteLog(logdir, target.target, newlogentry) return -1 return 0
def ConfRead(self, debug): FConf = fileio() FConf.ReadFile(self.confname) DayVal = int(datetime.datetime.now().strftime("%d")) self.date = datetime.datetime.now().strftime("%Y-%m-%d") if DayVal < 10: self.date_abbr = datetime.datetime.now().strftime("%b %d").lstrip("0").replace(" 0", " ") else: self.date_abbr = datetime.datetime.now().strftime("%b %d").lstrip("0").replace(" 0", " ") if debug == True: print "self.date: " + self.date print "self.date_abbr: " + self.date_abbr for line in FConf.fileobject: if debug == True: print line intLen = len(line) if line.find("logdir") != -1: self.logdir = line[7:intLen] elif line.find("emailrecp") != -1: self.emailrecp = line[10:intLen] elif line.find("emailsend") != -1: self.emailsend = line[10:intLen] elif line.find("useragent") != -1: self.useragent = line[10:intLen] else: if debug == True: print "" if debug == True: print "Finished configuration." print ""
def LogRead(self, debug): FLog = fileio() FLog.ReadFile(self.filename) for line in FLog.fileobject: if (line.find('Bad protocol version identification') != -1): intFromVal1 = line.find('from') if (intFromVal1 != -1): intLen = len(line) intFromVal2 = intFromVal1 + 5 self.data += line[intFromVal2:intLen] if (debug == True): print 'IP: ' + self.data elif (line.find('Did not receive identification string') != -1): intFromVal1 = line.find('from') if (intFromVal1 != -1): intLen = len(line) intFromVal2 = intFromVal1 + 5 self.data += line[intFromVal2:intLen] if (debug == True): print 'IP: ' + self.data elif (line.find('error: connect_to') != -1): intFromVal1 = line.find('_to') if (intFromVal1 != -1): intLen = len(line) intFromVal2 = intFromVal1 + 4 self.data += line[intFromVal2:intLen-17] if (debug == True): print 'IP: ' + self.data else: if (debug == True): print '' return 0
def WriteLog(self, logdir, target, newlogline):
    """Append one HTML-formatted line to the per-target log file."""
    writer = fileio()
    logpath = logdir + target + '.html'
    writer.WriteLogFile(logpath, newlogline + '\n<br/>')
    return 0
def WriteReport(self, logdir, newlogline):
    """Append an entry to the master report file logroot.html."""
    writer = fileio()
    report_path = logdir + 'logroot.html'
    writer.WriteLogFile(report_path, str(newlogline))
    return 0
def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' dig_output_data = '' output = POE.logdir + 'Dig.txt' FI = fileio() print '\r\n[*] Running Dig against: ' + POE.target subproc = subprocess.Popen('dig -t NS ' + POE.target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for dig_data in subproc.stdout.readlines(): if (POE.debug == True): print '[DEBUG]: ' + dig_data dig_output_data += dig_data try: FI.WriteLogFile(output, dig_output_data) print colored('[*] Dig data has been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold']) if (POE.logging == True): newlogentry = 'Dig data has been generated to file here: <a href=\"' + output + '\"> Dig Output </a>' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'True,' except: print colored('[x] Unable to write Dig data to file', 'red', attrs=['bold']) if (POE.logging == True): newlogentry = 'Unable to write Dig data to file' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'False,' return -1 return 0
def SSH_Connection(self, sockAddr, target, port, filename): data = '' print 'Attempting to pull host key(s)...' error = 'Unable to pull host key(s)' subproc = subprocess.Popen('ssh-keyscan -p '+str(port)+' -t rsa,dsa '+target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for banner in subproc.stdout.readlines(): data += banner print banner, print '' if (len(banner) < 2): print 'Error: %s. ' %(error) else: if (len(filename) > 0): print 'Attempting to write host key(s) to file...' FIO = fileio() FIO.WriteFile(filename, data) print '' banner = '' print 'Attempting to generate fingerprint...' error = 'Unable to generate fingerprint' subproc = subprocess.Popen('ssh-keygen -l -F '+target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for banner in subproc.stdout.readlines(): print banner, print '' if (len(banner) < 2): print 'Error: %s. ' %(error) print '' return 0
def ConfRead(self, debug): FConf = fileio() FConf.ReadFile(self.confname) for line in FConf.fileobject: if (debug == True): print line intLen = len(line) if (line.find('logdir') != -1): self.logdir = line[7:intLen] if (debug == True): print 'logdir:' + self.logdir print '' elif (line.find('useragent') != -1): self.useragent = line[10:intLen] if (debug == True): print 'useragent:' + self.useragent print '' elif (line.find('apikey') != -1): self.apikey = line[7:intLen] if (debug == True): print 'apikey:' + self.apikey print '' else: if (debug == True): print '' if (debug == True): print 'Finished configuration.' print ''
def TargetRead(): FConf = fileio() try: #Conf file hardcoded here FConf.ReadFile(CON.targetlist) except: print '[x] Unable to read target file: ' + CON.targetlist print colored('[x] Unable to read target file: ' + CON.targetlist, 'red', attrs=['bold']) return -1 for line in FConf.fileobject: CON.listoftargets.append(line) if (CON.debug == True): print '[DEBUG]: ' + line CON.targetlistsize = len(CON.listoftargets) print '[*] Finished reading target file.' print '[*] Target file size: ' + str(CON.targetlistsize) + ' entries.' print '' return 0
def LogRead(self, debug): FLog = fileio() FLog.ReadFile(self.filename) for line in FLog.fileobject: if line.find(self.date_abbr) != -1: if line.find("Bad protocol version identification") != -1: intFromVal1 = line.find("from") if intFromVal1 != -1: intLen = len(line) intFromVal2 = intFromVal1 + 5 self.data += line[intFromVal2:intLen] self.log_line += line + "*" if debug == True: print "IP: " + self.data print "Log line: " + self.log_line elif line.find("Did not receive identification string") != -1: intFromVal1 = line.find("from") if intFromVal1 != -1: intLen = len(line) intFromVal2 = intFromVal1 + 5 self.data += line[intFromVal2:intLen] self.log_line += line + "*" if debug == True: print "IP: " + self.data print "Log line: " + self.log_line else: if debug == True: print "" return 0
def POE(logdir, target, logging, debug): if (logging == True): LOG = logger() newlogentry = '' strings_dump = '' strings_output_data = '' output = logdir + 'PDFParse.txt' FI = fileio() if (logging == True): newlogentry = 'Running pdf-parse against: <strong>' + target.filename + '</strong>' LOG.WriteLog(logdir, target.filename, newlogentry) subproc = subprocess.Popen('/opt/static/pdf-parser.py -c ' + target.filename, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for strings_data in subproc.stdout.readlines(): strings_output_data += strings_data if (debug == True): print strings_data try: FI.WriteLogFile(output, strings_output_data) print '[*] PDF Parse data had been written to file here: ' + output if (logging == True): newlogentry = 'PDF Parse data has been generated to file here: <a href=\"' + output + '\"> PDF Parse Output </a>' LOG.WriteLog(logdir, target.filename, newlogentry) except: print '[x] Unable to write PDF Parse data to file' if (logging == True): newlogentry = 'Unable to write PDF Parse data to file' LOG.WriteLog(logdir, target.filename, newlogentry) return -1 return 0
def Strings(): newlogentry = '' strings_dump = '' filename = LOG.logdir + 'Strings.txt' FI = fileio() newlogentry = 'Running strings against: <strong>' + AP.filename + '</strong>' LOG.WriteLog(AP.sampleid, newlogentry) newlogentry = '' subproc = subprocess.Popen('strings '+ LOG.temp + AP.filename, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for strings_data in subproc.stdout.readlines(): AP.strings_output_data += strings_data if (AP.debug == True): print strings_data try: FI.WriteLogFile(filename, AP.strings_output_data) newlogentry = 'Strings file has been generated to file here: <a href=\"' + LOG.rafaleroot + 'samples/' + AP.sampleid + '/' + 'Strings.txt' + '\"> Strings Output </a>' LOG.WriteLog(AP.sampleid, newlogentry) newlogentry = '' except: newlogentry = 'Unable to perform strings against uploaded file' LOG.WriteLog(AP.sampleid, newlogentry) newlogentry = '' return 0
def WriteLog(self, sampleid, newlogline):
    """Append a timestamped HTML log line to <logdir><sampleid>.html."""
    writer = fileio()
    stamp = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
    logpath = self.logdir + sampleid + '.html'
    writer.WriteLogFile(logpath, stamp + ' ' + newlogline + '\n<br/>')
    return 0
def WriteLog(self, logdir, target, newlogline):
    """Append a timestamped HTML log line to <logdir><target>.html."""
    writer = fileio()
    stamp = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
    logpath = logdir + target + '.html'
    writer.WriteLogFile(logpath, stamp + ' ' + newlogline + '\n<br/>')
    return 0
def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' alx_output_data = '' output = POE.logdir + 'Abuse_ch_ransomware_URLs.txt' malware_flag = 0 if (POE.ip == True): print colored( '\r\n[-] Unable to execute abuse.ch ransomware URL grep - target must be a domain or URL - skipping.', 'yellow', attrs=['bold']) if (POE.logging == True): newlogentry = 'Unable to execute abuse.ch ransomware URL grep - target must be a domain or URL - skipping.' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'N/A,' return -1 FI = fileio() print '\r\n[*] Running abuse.ch ransomware URL grep against: ' + POE.target subproc = subprocess.Popen('grep ' + POE.target + ' /opt/mirage/feeds/RW_URLBL.txt', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for alx_data in subproc.stdout.readlines(): if (alx_data != ''): malware_flag = 1 print colored( '[-] Target appears in the abuse.ch Ransomware URLs feed', 'red', attrs=['bold']) if (POE.debug == True): print '[DEBUG]: ' + alx_data alx_output_data += alx_data if (alx_output_data != ''): if (POE.logging == True): newlogentry = '<strong>abuse.ch: Target appears in the abuse.ch URLs feed</strong>' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'True,' else: print colored( '[-] Target does not appear in the abuse.ch Ransomware URLs feed', 'yellow', attrs=['bold']) print colored('[x] abuse.ch ransomware URL data not written to file', 'red', attrs=['bold']) POE.csv_line += 'False,' return 0
def POE(logdir, target, logging, debug): newlogentry = '' enum_output_data = '' length = 512 signaturedb = '/opt/static/UserDB.txt' signature = '' output = logdir + 'Quickenum.txt' if (logging == True): LOG = logger() newlogentry = 'Running quickenum against: <strong>' + target.filename + '</strong>' LOG.WriteLog(logdir, target.filename, newlogentry) FI = fileio() PEX = pefile.PE(target.filename) SIG = peutils.SignatureDatabase(signaturedb) enum_output_data += '-' * 79 + '\n' enum_output_data += 'PE sections for sample: ' + target.filename + '\n' enum_output_data += '-' * 79 + '\n' enum_output_data += '\n' enum_output_data += 'File compile time: ' + str(datetime.datetime.fromtimestamp(PEX.FILE_HEADER.TimeDateStamp)) + '\n' enum_output_data += '\n' print '[*] File compile time: ' + str(datetime.datetime.fromtimestamp(PEX.FILE_HEADER.TimeDateStamp)) if (logging == True): newlogentry = 'File compile time: ' + '<strong>' + str(datetime.datetime.fromtimestamp(PEX.FILE_HEADER.TimeDateStamp)) + '</strong>' LOG.WriteLog(logdir, target.filename, newlogentry) signature = SIG.generate_ep_signature(PEX, target.filename, length) enum_output_data += 'PEiD Signature: ' + str(signature) + '\n' print '[*] PEiD Signature: ' + str(signature) + '\n' for section in PEX.sections: enum_output_data += 'Section Name: ' + section.Name[:5] + '\n' print '[*] Section Name: ' + section.Name enum_output_data += 'Virtual Address: ' + str(hex(section.VirtualAddress)) + '\n' print ' Virtual Address: ' + str(hex(section.VirtualAddress)) enum_output_data += 'Virtual Size: ' + str(hex(section.Misc_VirtualSize)) + '\n' print ' Virtual Size: ' + str(hex(section.Misc_VirtualSize)) enum_output_data += 'Raw Data Size: ' + str(section.SizeOfRawData) + '\n' print ' Raw Data Size: ' + str(section.SizeOfRawData) + '\n' enum_output_data += '\n' try: FI.WriteLogFile(output, enum_output_data) print '[*] Quickenum data had been written to file here: ' + output + '\n' if (logging == True): newlogentry = 'Quickenum 
file has been generated to file here: <a href=\"' + output + '\"> Enumsections Output </a>' LOG.WriteLog(logdir, target.filename, newlogentry) except: print '[x] Unable to write quickenum data to file' if (logging == True): newlogentry = 'Unable to write quickenum data to file' LOG.WriteLog(logdir, target.filename, newlogentry) return -1 return 0
def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' cert_data = '' cert_output_data = '' https_data = [] if not POE.https_data: print colored( '\r\n[-] Cert - Active scan not undertaken for HTTPs ports. Defaulting to 443...', 'yellow', attrs=['bold']) https_data = [443] else: https_data = POE.https_data FI = fileio() for port in https_data: output = POE.logdir + 'Cert_port_' + str(port) + '.txt' print '\r\n[*] Running cert against: ' + POE.target subproc = subprocess.Popen( 'timeout 20s openssl s_client -showcerts -connect ' + POE.target + ':' + str(port), shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for cert_data in subproc.stdout.readlines(): cert_output_data += cert_data if (POE.debug == True): print cert_data try: FI.WriteLogFile(output, cert_output_data) print colored('[*] Cert data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold']) if (POE.logging == True): newlogentry = 'Cert file has been generated to file here: <a href=\"' + output + '\"> Cert Output </a>' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'True,' except: print colored('[x] Unable to write cert data to file', 'red', attrs=['bold']) if (POE.logging == True): newlogentry = 'Unable to cert strings data to file' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'False,' return -1 return 0
def WriteSamplesFile(self, sampleid, samplename, SHA256):
    """Record one sample's metadata in Samples.txt in the samples directory."""
    writer = fileio()
    stamp = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
    #print 'Writing Samples.txt\n'
    samples_path = self.samplesdir.strip() + 'Samples.txt'
    entry = 'SampleID: ' + sampleid + ' Name: ' + samplename + ' SHA256: ' + SHA256 + ' Date/Time: ' + stamp + '\n'
    writer.WriteLogFile(samples_path, entry)
    return 0
def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' whois_dump = '' whois_output_data = '' country = '' country_count = 0 output = POE.logdir + 'WhoIs.txt' if (POE.debug == True): print output FI = fileio() print '\r\n[*] Running WhoIs against: ' + POE.target subproc = subprocess.Popen('whois ' + POE.target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for whois_data in subproc.stdout.readlines(): whois_output_data += whois_data if (whois_data.find('No match for \"')!= -1): print colored('[x] No WhoIs record available for this domain...', 'red', attrs=['bold']) POE.csv_line += 'N/A,' return -1 elif (whois_data.find('connect: Network is unreachable')!= -1): print colored('[x] WhoIs is unable to connect to the network [proxy blocked?] ', 'red', attrs=['bold']) POE.csv_line += 'N/A,' return -1 elif (country_count==0): if ((whois_data.find('country')!= -1) or (whois_data.find('Country')!= -1)): country = whois_data country_count += 1 if (POE.debug == True): print whois_data try: FI.WriteLogFile(output, whois_output_data) print colored('[*] Country Code: ', 'green', attrs=['bold']) + colored(country, 'blue', attrs=['bold']) print colored('[*] WhoIs data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold']) if (POE.logging == True): newlogentry = 'WhoIs file has been generated to file here: <a href=\"' + output + '\"> WhoIs Output </a>' LOG.WriteLog(POE.logdir, POE.target, newlogentry) newlogentry = '|-----------------> ' + country if (country==''): POE.csv_line += 'N/A,' else: POE.csv_line += country.rstrip() + ',' LOG.WriteLog(POE.logdir, POE.target, newlogentry) except: print colored('[x] Unable to write whois data to file', 'red', attrs=['bold']) if (POE.logging == True): newlogentry = 'Unable to write whois data to file' LOG.WriteLog(POE.logdir, POE.target, newlogentry) return -1 return 0
def ListModules(): FConf = fileio() count = 0 addins = '' for addins in CON.fileaddins: FConf.ReadFile(CON.modulesdir.strip() + addins.strip() + '.py') for line in FConf.fileobject: if (count == 1): print '[*] ' + addins + line count = 0 break if (line.find('***BEGIN DESCRIPTION***') != -1): count = 1 for addins in CON.peaddins: FConf.ReadFile(CON.modulesdir.strip() + addins.strip() + '.py') for line in FConf.fileobject: if (count == 1): print '[*] ' + addins + line count = 0 break if (line.find('***BEGIN DESCRIPTION***') != -1): count = 1 for addins in CON.msoaddins: FConf.ReadFile(CON.modulesdir.strip() + addins.strip() + '.py') for line in FConf.fileobject: if (count == 1): print '[*] ' + addins + line count = 0 break if (line.find('***BEGIN DESCRIPTION***') != -1): count = 1 for addins in CON.pdfaddins: FConf.ReadFile(CON.modulesdir.strip() + addins.strip() + '.py') for line in FConf.fileobject: if (count == 1): print '[*] ' + addins + line count = 0 break if (line.find('***BEGIN DESCRIPTION***') != -1): count = 1 for addins in CON.elfaddins: FConf.ReadFile(CON.modulesdir.strip() + addins.strip() + '.py') for line in FConf.fileobject: if (count == 1): print '[*] ' + addins + line count = 0 break if (line.find('***BEGIN DESCRIPTION***') != -1): count = 1 return 0
def HTMLRead(self, filename, debug): FLog = fileio() FLog.ReadFile(filename) for line in FLog.fileobject: self.html_reader_data += line if (debug == True): print line return 0
def __init__(self, word):
    # Search term this collector tracks.
    self.word = word
    # API credentials -- 'ADD' placeholders; fill in before use.
    self.key = 'ADD'
    self.secret = 'ADD'
    # Authenticated handles built by the class's own helpers.
    # (presumably Twitter, given get_auth/get_api/tweets -- confirm)
    self.auth = self.get_auth()
    self.api = self.get_api()
    # Collected tweets plus running and maximum counters.
    self.tweets = []
    self.tweet_total = 0
    self.max_tweets = 0
    # File helper used to persist results under the 'tweet' name.
    self.f = fileio('tweet')
def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' trt_output_data = '' not_found_flag = 0 output = POE.logdir + 'Traceroute.txt' FI = fileio() print '\r\n[*] Running Traceroute against: ' + POE.target subproc = subprocess.Popen('traceroute ' + POE.target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for trt_data in subproc.stdout.readlines(): if (POE.debug == True): print '[DEBUG]: ' + trt_data trt_output_data += trt_data if (trt_output_data.find('Name or service not known') != -1): not_found_flag = 1 try: if (not_found_flag == 1): print colored( '[x] Traceroute: Name or service not known. Traceroute data not written to file.', 'red', attrs=['bold']) if (POE.logging == True): newlogentry = 'Traceroute: Name or service not known. Traceroute data not written to file' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'False,' else: FI.WriteLogFile(output, trt_output_data) print colored( '[*] Traceroute data has been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold']) if (POE.logging == True): newlogentry = 'Traceroute data has been generated to file here: <a href=\"' + output + '\"> Traceroute Output </a>' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'True,' except: print colored( '[x] Exception. Unable to write Traceroute data to file!', 'red', attrs=['bold']) if (logging == True): newlogentry = 'Exception. Unable to write Traceroute data to file!' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'False,' return -1 return 0
def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' host_output_data = '' not_found_flag = 0 output = POE.logdir + 'host.txt' FI = fileio() print '\r\n[*] Running Host against: ' + POE.target subproc = subprocess.Popen('host -a ' + POE.target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for host_data in subproc.stdout.readlines(): if (POE.debug == True): print '[DEBUG]: ' + host_data host_output_data += host_data if (host_output_data.find('not found:') != -1): not_found_flag = 1 try: if (not_found_flag == 1): print colored( '[x] Host: host not found. Host data not written to file.', 'red', attrs=['bold']) if (POE.logging == True): newlogentry = 'Host: host not found. Host data not written to file.' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'False,' else: FI.WriteLogFile(output, host_output_data) print colored('[*] Host data has been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold']) if (POE.logging == True): newlogentry = 'Host data has been generated to file here: <a href=\"' + output + '\"> Host Output </a>' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'True,' except: print colored('[x] Unable to write Host data to file', 'red', attrs=['bold']) if (logging == True): newlogentry = 'Unable to write Host data to file' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'False,' return -1 return 0
def ConfRead(): ret = 0 intLen = 0 FConf = fileio() data = '' try: #Conf file hardcoded here with open('/opt/mirage/mirage.conf', 'r') as read_file: data = json.load(read_file) except: print colored('[x] Unable to read configuration file.', 'red', attrs=['bold']) return -1 CON.logger = data['logger'] CON.logroot = data['logroot'] CON.useragent = data['useragent'] CON.useragent = CON.useragent.strip() CON.sleeptime = data['sleeptime'] if ((int(CON.sleeptime.strip()) < 0) or (int(CON.sleeptime.strip()) > 120)): CON.sleeptime = '7' print colored( '[x] sleeptime value out of range. sleeptime must be between 0 and 120 seconds.', 'red', attrs=['bold']) print colored('[-] sleeptime defaulting to 7 seconds.', 'yellow', attrs=['bold']) CON.modulesdir = data['modulesdir'] CON.types = data['addintypes'] CON.addins = data['addins'] if (CON.debug == True): print '[DEBUG] data: ', data print '[DEBUG] CON.logger: ' + str(CON.logger) print '[DEBUG] CON.logroot: ' + str(CON.logroot) print '[DEBUG] CON.useragent: ' + str(CON.useragent) print '[DEBUG] CON.sleeptime: ' + str(CON.sleeptime) print '[DEBUG] CON.modulesdir: ' + str(CON.modulesdir) print '[DEBUG] CON.types: ' + str(CON.types) for a_addins in CON.addins: for key, value in a_addins.iteritems(): print '[DEBUG] CON.addins key: ' + key + ' value: ' + value if (CON.debug == True): print '[*] Finished configuration.' print '' return 0
def ListAddinTypes(): FConf = fileio() count = 0 addins = '' print '[*] Addin types available are:\n' for type_out in CON.types: print '[*] Type: ' + type_out print '[*] --Or-- type all' return 0
def send_email():
    """Send the configured message (subject/body/attachments from CON) to
    every address in CON.recipients -- one independent SMTP session per
    recipient. Returns 0."""
    base_filename = ''
    FLOG = fileio()

    for recipient_entry in CON.recipients:
        print '\r\n[-] Sending e-mail to: ' + recipient_entry.strip()
        FLOG.WriteLogFile(CON.logfile, '[-] Sending e-mail to: ' + recipient_entry.strip() + '\n')

        # Build the email message
        msg = MIMEMultipart()
        msg['Subject'] = CON.email_subject.strip()
        msg['From'] = CON.email.strip()
        msg['To'] = recipient_entry
        msg.attach(MIMEText(CON.body))

        if (len(CON.attachments) >= 1):
            for attachment_entry in CON.attachments:
                if (CON.debug == True):
                    print '\n[DEBUG] attachment_entry: ' + attachment_entry
                    FLOG.WriteLogFile(CON.logfile, '[DEBUG] attachment_entry: ' + attachment_entry + '\n')
                base_filename = os.path.basename(attachment_entry)
                if (CON.debug == True):
                    print '\n[DEBUG] base_filename: ' + base_filename
                    FLOG.WriteLogFile(CON.logfile, '[DEBUG] base_filename: ' + base_filename + '\n')
                # Each attachment is base64-encoded as application/octet-stream.
                part = MIMEBase('application', "octet-stream")
                part.set_payload(open(attachment_entry, "rb").read())
                Encoders.encode_base64(part)
                part.add_header('Content-Disposition', 'attachment; filename=' + base_filename)
                msg.attach(part)
                print '[-] Added attachment: ' + attachment_entry + '\n'
                FLOG.WriteLogFile(CON.logfile, '[-] Added attachment: ' + attachment_entry + '\n')

        server = smtplib.SMTP(CON.server, int(CON.serverport))
        # Passthrough relays accept mail unauthenticated; otherwise
        # negotiate TLS and log in with the configured credentials.
        if (CON.emailpassthrough == False):
            server.ehlo()
            server.starttls()
            server.login(CON.email.strip(), CON.password.strip())
        # NOTE(review): envelope sender is set to the recipient address,
        # not CON.email -- looks intentional for passthrough but confirm.
        server.sendmail(recipient_entry, recipient_entry, msg.as_string())
        server.quit()
        print '[*] E-mail sent!\n'
        FLOG.WriteLogFile(CON.logfile, '[*] E-mail sent!\n')
    return 0
def POE(logdir, targetfile, logging, debug): if (logging == True): LOG = logger() newlogentry = '' macro_dump_data = '' FI = fileio() try: filedata = open(targetfile.filename, 'rb').read() vbaparser = VBA_Parser(targetfile.filename, data=filedata) if vbaparser.detect_vba_macros(): print '[*] VBA macros found - Extracting...\n' if (logging == True): newlogentry = 'VBA macros found - Extracting...' LOG.WriteLog(logdir, targetfile.filename, newlogentry) for (filename, stream_path, vba_filename, vba_code) in vbaparser.extract_macros(): macro_dump_data += '-' * 79 + '\n' try: macro_dump_data += 'Filename :' + filename.encode("ascii", "replace") + '\n' macro_dump_data += 'OLE stream :' + stream_path.encode("ascii", "replace") + '\n' macro_dump_data += 'VBA filename:' + vba_filename.encode("ascii", "replace") + '\n' except Exception, e: print '[x] Current macro - unable to print Filename, OLE stream or VBA filename due to encoding issue (Unicode?): ', e macro_dump_data += '-' * 79 + '\n' macro_dump_data += vba_code FI.WriteLogFile(logdir + vba_filename, macro_dump_data) try: print '[*] Macro ' + vba_filename.encode("ascii", "replace") + ' extracted to: ' + logdir + vba_filename.encode("ascii", "replace") targetfile.macros.append(logdir + vba_filename.encode("ascii", "replace")) if (logging == True): newlogentry = 'Macro ' + vba_filename.encode("ascii", "replace") + ' extracted to: <a href=\"' + logdir + vba_filename.encode("ascii", "replace") + '\">' + vba_filename.encode("ascii", "replace") + '</a>' LOG.WriteLog(logdir, targetfile.filename, newlogentry) if (debug == True): print '-'*79 print 'Filename :', filename.encode("ascii", "replace") print 'OLE stream :', stream_path.encode("ascii", "replace") print 'VBA filename:', vba_filename.encode("utf-8", "ignore") print '-'*79 except Exception, e: print '[x] Current macro - unable print Filename, OLE stream or VBA filename due to encoding issue: (Unicode?)', e if (debug == True): print vba_code macro_dump_data = '' print 
'Macro List' for mlist in targetfile.macros: print mlist
def LogFooter(self, logdir, target): FLog = fileio() filename = logdir + target + '.html' data = '--------------------------------------------------------------------------------' data += '---------------------------------------\n<br/>' data += 'Processed by Mirage v0.8\n<br/>' data += '--------------------------------------------------------------------------------' data += '---------------------------------------\n<br/>' data += '\n</body>\n</html>\n' FLog.WriteLogFile(filename, data) print '[*] Log file written to: ' + filename return 0
def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' tor_output_data = '' if (POE.ip == False): print colored( '\r\n[-] Unable to execute ToR Node IP grep - target must be an IP - skipping.', 'yellow', attrs=['bold']) if (POE.logging == True): newlogentry = 'Unable to execute ToR Node IP grep - target must be an IP - skipping.' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'N/A,' return -1 FI = fileio() print '\r\n[*] Running ToR Node grep against: ' + POE.target subproc = subprocess.Popen('grep ' + POE.target + ' /opt/mirage/feeds/ToR_Exits.txt', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for tor_data in subproc.stdout.readlines(): if (tor_data != ''): print colored('[-] Target is or was a ToR exit node...', 'red', attrs=['bold']) if (POE.debug == True): print '[DEBUG]: ' + tor_data tor_output_data += tor_data if (tor_output_data != ''): if (POE.logging == True): newlogentry = '<strong>ToR: Target is or was a ToR exit node...</strong>' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'True,' else: print colored('[-] Target does not appear to be a ToR exit node.', 'yellow', attrs=['bold']) print colored('[x] ToR data not written to file', 'red', attrs=['bold']) POE.csv_line += 'False,' return 0
def POE(POE): if (POE.logging == True): LOG = logger() newlogentry = '' alx_output_data = '' if (POE.ip == False): print colored( '\r\n[-] Unable to execute abuse.ch Feodo IP grep - target must be an IP - skipping.', 'yellow', attrs=['bold']) if (POE.logging == True): newlogentry = 'Unable to execute abuse.ch Feodo IP grep - target must be an IP - skipping.' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'N/A,' return -1 FI = fileio() print '\r\n[*] Running abuse.ch Feodo grep against: ' + POE.target subproc = subprocess.Popen('grep ' + POE.target + ' /opt/mirage/feeds/feodo_ipblocklist.txt', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for alx_data in subproc.stdout.readlines(): if (alx_data != ''): print colored('[-] Target appears in the abuse.ch Feodo feed', 'red', attrs=['bold']) if (POE.debug == True): print '[DEBUG]: ' + alx_data alx_output_data += alx_data if (alx_output_data != ''): if (POE.logging == True): newlogentry = '<strong>abuse.ch: Target appears in the abuse.ch Feodo feed</strong>' LOG.WriteLog(POE.logdir, POE.target, newlogentry) POE.csv_line += 'True,' else: print colored('[-] Target does not appear in the abuse.ch Feodo feed', 'yellow', attrs=['bold']) print colored('[x] abuse.ch Feodo data not written to file', 'red', attrs=['bold']) POE.csv_line += 'False,' return 0
def lgd(numfolds=5, threshold=0.5, featsfile='featsGBM.txt'): """""" # open up file containing the columns we wish to use f = fio.fileio(usePCA=False) st = time.time() # Found these with magic numcols = pd.read_csv(featsfile).feature.values[:150].tolist() X = f.loadNumericTrain(usecols=numcols) y = f.loadLabels().loss.values print "Training data took %f seconds to load" % (time.time() - st) rgr = GradientBoostingRegressor( **ml.INIT_PARAMS['GradientBoostingRegressor']) # Load the test data Xtest = f.loadNumericTest(usecols=numcols) # Open up the train/test files bTrain = pd.read_csv('../data/subs/train/gbm.csv') bTest = pd.read_csv('../data/subs/test/gbm.csv') zp = bTrain.loss.values yp = bTest.loss.values if True: y_ = np.zeros(zp.size) Z, p = X[zp > threshold, :], y[zp > threshold] y_[zp > threshold] = ml.stratKFold(Z, p, rgr, nFolds=2, classify=False) print "CV Error: %f" % mean_absolute_error(y, y_) # Train on all, transform to log space yy = y[zp > threshold] yy = np.log10(yy + 1.) rgr.fit(X[zp > threshold, :], yy) # predict and transform yr = rgr.predict(Xtest) yr = 10.**(yr) - 1. yr[yr < 0] = 0 yr[yr > 100] = 100. print "Training took %f seconds" % (time.time() - st) sub_ = pd.read_csv('../data/sampleSubmission.csv') sub_.loss = yr * (yp > threshold) sub_.loss[sub_.loss < 0] = 0. # Write to file sub_.to_csv('../data/subs/testSubmission.csv', index=False)
def lgd(numfolds=5,threshold=0.5,featsfile='featsGBM.txt'): """""" # open up file containing the columns we wish to use f = fio.fileio(usePCA=False) st = time.time() # Found these with magic numcols = pd.read_csv(featsfile).feature.values[:150].tolist() X = f.loadNumericTrain(usecols=numcols) y = f.loadLabels().loss.values print "Training data took %f seconds to load" %(time.time() - st) rgr = GradientBoostingRegressor(**ml.INIT_PARAMS['GradientBoostingRegressor']) # Load the test data Xtest = f.loadNumericTest(usecols=numcols) # Open up the train/test files bTrain = pd.read_csv('../data/subs/train/gbm.csv') bTest = pd.read_csv('../data/subs/test/gbm.csv') zp = bTrain.loss.values yp = bTest.loss.values if True: y_ = np.zeros(zp.size) Z, p = X[zp > threshold,:], y[zp > threshold] y_[zp > threshold] = ml.stratKFold(Z, p, rgr, nFolds=2, classify=False) print "CV Error: %f"%mean_absolute_error(y, y_) # Train on all, transform to log space yy = y[zp > threshold] yy = np.log10(yy + 1.) rgr.fit(X[zp > threshold,:],yy) # predict and transform yr = rgr.predict(Xtest) yr = 10.**(yr) - 1. yr[yr < 0] = 0 yr[yr > 100] = 100. print "Training took %f seconds"%(time.time() - st) sub_ = pd.read_csv('../data/sampleSubmission.csv') sub_.loss = yr*(yp > threshold) sub_.loss[sub_.loss < 0] = 0. # Write to file sub_.to_csv('../data/subs/testSubmission.csv',index=False)
def Whois(target, logdir): FI = fileio() filename = logdir + 'Whois.txt' if (AP.debug == True): print 'Whois domain: ' + target subproc = subprocess.Popen('whois '+target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for whois_data in subproc.stdout.readlines(): AP.whois_output_data += whois_data if (AP.debug == True): print whois_data FI.WriteFile(filename, AP.whois_output_data)
def POE(logdir, target, logging, debug): newlogentry = '' full_dump = '' matches = '' output = logdir + 'FullDump.txt' signaturedb = '/opt/static/UserDB.txt' if (logging == True): LOG = logger() FI = fileio() SIG = peutils.SignatureDatabase(signaturedb) PEX = pefile.PE(target.filename) if (logging == True): newlogentry = 'PEiD Signature (...if signature database present):' LOG.WriteLog(logdir, target.filename, newlogentry) if (signaturedb != ''): matches = SIG.match_all(PEX, ep_only=True) print '[*] Signature Matches: ' + str(matches) if (logging == True): newlogentry = 'Signature: ' + '<strong>' + str( matches) + '</strong>' LOG.WriteLog(logdir, target.filename, newlogentry) newlogentry = 'Sample Attribute Sections:' LOG.WriteLog(logdir, target.filename, newlogentry) for section in PEX.sections: if (debug == True): print '[DEBUG] ' + section.Name + ' Virtual Address: ' + str( hex(section.VirtualAddress)) + ' Virtual Size: ' + str( hex(section.Misc_VirtualSize)) + ' Raw Data Size: ' + str( section.SizeOfRawData) try: for entry in PEX.DIRECTORY_ENTRY_IMPORT: for imp in entry.imports: if (debug == True): print '[DEBUG] imp address and name: ' + str( hex(imp.address)) + str(imp.name) except Exception, e: print '[-] Unable to process DIRECTORY_ENTRY_IMPORT object: ', e if (logging == True): newlogentry = 'Unable to process DIRECTORY_ENTRY_IMPORT object' LOG.WriteLog(logdir, target.filename, newlogentry)
def ConfRead(): ret = 0 intLen = 0 FConf = fileio() try: #Conf file hardcoded here FConf.ReadFile('/opt/mirage/mirage.conf') except: print '[x] Unable to read configuration file' return -1 for line in FConf.fileobject: intLen = len(line) if (CON.debug == True): print line if (line.find('logger') != -1): CON.logger = line[7:intLen] elif (line.find('logroot') != -1): CON.logroot = line[8:intLen] elif (line.find('useragent') != -1): CON.useragent = line[10:intLen] CON.useragent = CON.useragent.strip() elif (line.find('apikey') != -1): CON.apikey = line[7:intLen] elif (line.find('modulesdir') != -1): CON.modulesdir = line[11:intLen] elif (line.find('infoaddin') != -1): CON.infoaddins.append(line[10:intLen]) elif (line.find('activeaddin') != -1): CON.activeaddins.append(line[12:intLen]) elif (line.find('passiveaddin') != -1): CON.passiveaddins.append(line[13:intLen]) elif (line.find('sleeptime') != -1): if (CON.sleeptime == ''): CON.sleeptime = line[10:intLen] if ((int(CON.sleeptime.strip()) < 0) or (int(CON.sleeptime.strip()) > 120)): CON.sleeptime = '7' print '[x] sleeptime value out of range. sleeptime must be between 0 and 120 seconds' print '[-] sleeptime defaulting to 7 seconds.' else: if (CON.debug == True): print '' if (CON.debug == True): print 'Finished configuration.' print '' return 0
def ReportFooter(self, logdir): FLog = fileio() filename = logdir + 'logroot.html' data = '<strong>END OF FILE</strong><br/>' data += '--------------------------------------------------------------------------------' data += '---------------------------------------\n<br/>' data += 'Processed by Mirage v0.8\n<br/>' data += '--------------------------------------------------------------------------------' data += '---------------------------------------\n<br/>' data += '\n</body>\n</html>\n' FLog.WriteLogFile(filename, data) print '\n' print '[*] Report file written to: ' + filename return 0
def LogFooter(self, sampleid):
    """Close out the per-sample Rafale HTML log with the footer banner and
    a return-home link. Always returns 0."""
    log_writer = fileio()
    # Side effect kept from the original: refresh startdatetime at footer time.
    self.startdatetime = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
    log_path = self.logdir + sampleid + '.html'
    banner = '--------------------------------------------------------------------------------'
    banner += '---------------------------------------\n<br/>'
    pieces = [
        '<strong>END OF FILE</strong><br/>',
        banner,
        'Processed by Rafale v0.3\n<br/>',
        banner,
        '<a href=\"' + self.home + '\"> Return Home </a>\n</body>\n</html>\n',
    ]
    log_writer.WriteLogFile(log_path, ''.join(pieces))
    return 0
def LogFooter(self, logdir, target): FLog = fileio() self.startdatetime = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y") filename = logdir + target + '.html' data = '<strong>END OF FILE</strong><br/>' data += '--------------------------------------------------------------------------------' data += '---------------------------------------\n<br/>' data += 'Processed by Static v0.1\n<br/>' data += '--------------------------------------------------------------------------------' data += '---------------------------------------\n<br/>' data += '\n</body>\n</html>\n' FLog.WriteLogFile(filename, data) print '[*] Log file written to: ' + filename return 0
def Email(): FI = fileio() filename = LOG.logdir + 'Msg.txt' if (AP.debug == True): print 'Filename: ' + filename print 'Email Sender: ' + AP.emailsend email_output_data = '' #For some strange reason, the e-mail address values simply will not be read in as strings so they need conversion email_output_data += 'To: ' + str(LOG.emailrecp.strip()) + '\n' email_output_data += 'From: ' + str(LOG.emailsend.strip()) + '\n' email_output_data += 'Subject: Rafale Malware Information System - New Sample: ' + AP.filename + '\n' email_output_data += '\n' email_output_data += 'Hi,\n' email_output_data += '\n' email_output_data += 'Rafale has received a new sample. SampleID: ' + AP.sampleid + ' Filename: ' + AP.filename + '\n' email_output_data += '\n' email_output_data += 'Hash results are as follows: \n' email_output_data += '\n' email_output_data += 'MD5\n' email_output_data += '---------\n' email_output_data += AP.MD5 + '\n' email_output_data += ' \n' email_output_data += 'SHA256\n' email_output_data += '---------\n' email_output_data += AP.SHA256 + '\n' email_output_data += '\n' email_output_data += 'Live VirusTotal data\n' email_output_data += '---------\n' email_output_data += AP.live_virus_total + '\n\n' email_output_data += 'Strings\n' email_output_data += '---------\n' email_output_data += AP.strings_output_data + '\n' email_output_data += '\n' FI.WriteLogFile(filename, email_output_data) if (AP.debug == True): print email_output_data subproc = subprocess.Popen('/usr/sbin/ssmtp ' + str(LOG.emailrecp.strip()) + ' < ' + filename, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for email_data in subproc.stdout.readlines(): if (AP.debug == True): print email_data
def CreateTemplate(self, target, protocol, port, filename):
    """Write a Python exploit skeleton for target:port to filename.

    Falls back to 'template.py' when filename is shorter than 3 chars and
    returns the fileio.WriteFile result.

    Bug fix: the generated sockAddr line previously emitted the target
    unquoted and with an unbalanced ')' -- 'sockAddr = (1.2.3.4, 80))' --
    so the generated template could never compile. The target is now
    quoted and the parentheses balanced.
    """
    FIO = fileio()
    # NOTE(review): indentation inside the template is normalized to
    # 4 spaces; the original's exact spacing was not recoverable.
    templateshellcode = (
        "#!/usr/bin/python\n"
        "#Exploit Description \n"
        "\n"
        "\n"
        "# python imports\n"
        "import os\n"
        "import sys\n"
        "import time\n"
        "import socket\n"
        "import struct\n"
        "\n"
        "shellcode = ()"
        "\n"
        "\n"
        "def Exploit(target, port):\n"
        "    sockAddr = ('" + target + "', " + str(port) + ")\n"
        "    tsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n"
        "    tsock.connect(sockAddr)\n"
        "    response = tsock.recv(1024)\n"
        "\n"
        "    #payload = input payload\n"
        "\n"
        "    payload += ' '\n"
        "    tsock.send(payload)\n"
        "\n"
        "if __name__ == '__main__':\n"
        "    try:\n"
        "        target = sys.argv[1]\n"
        "        port = sys.argv[2]\n"
        "    except IndexError:\n"
        "        print 'Usage: %s <target> <port>' % sys.argv[0]\n"
        "        sys.exit(-1)\n"
        "\n"
        "    Exploit(target, port)\n")

    if len(filename) < 3:
        filename = 'template.py'

    ret = FIO.WriteFile(filename, templateshellcode)
    return ret
def classifier(filename='', calctest=True, traindir='', testdir='', numfolds=5): """""" outfile = filename + '.csv' # open up file containing the columns we wish to use st = time.time() f = fio.fileio(usePCA=True) cols = ['f274', 'f727', 'f2', 'f271', 'f527', 'f528'] X = f.loadNumericTrain(usecols=cols) yf = f.loadLabels() y = yf.loss.values print "Training data took %f seconds to load" % (time.time() - st) # Train the gradient boosting classifier clf = GradientBoostingClassifier( **ml.INIT_PARAMS['GradientBoostingClassifier']) st = time.time() y_ = ml.stratKFold(X, y, clf, nFolds=numfolds) fpr, tpr, thresh = roc_curve(y > 0, y_) # Print the scores print "AUC: %f" % auc(fpr, tpr) print "F1 Score: %f" % ml.maxF1(y > 0, y_) print "%d-Fold CV took %f seconds" % (numfolds, time.time() - st) yf['loss'] = y_ yf.to_csv(traindir + outfile, index=False) if calctest: st = time.time() # Load the test data Xtest = f.loadNumericTest(usecols=cols) # Fit the data clf.fit(X, y > 0) sub_ = pd.read_csv('../data/sampleSubmission.csv') sub_.loss = clf.predict_proba(Xtest)[:, 1] # Write to file sub_.to_csv(testdir + outfile, index=False) print "Test submission took %s seconds" % (time.time() - st)
def Email(target, logdir): FI = fileio() filename = logdir + 'Msg.txt' email_output_data = '' email_output_data += 'To: ' + AP.emailrecp + '\n' email_output_data += 'From: ' + AP.emailsend + '\n' if (AP.mode == 'auto'): email_output_data += 'Subject: PYRecon Auto Run - IP: ' + target + '\n' elif (AP.mode == 'fail2ban'): email_output_data += 'Subject: PYRecon Fail2ban Alert - IP: ' + target + '\n' else: email_output_data += 'Subject: PYRecon Manual Run - IP: ' + target + '\n' email_output_data += '\n' email_output_data += 'Hi,\n' email_output_data += '\n' email_output_data += 'PYRecon has reviewed IP: ' + target + '\n' email_output_data += '\n' email_output_data += 'The results are as follows: \n' email_output_data += '\n' email_output_data += 'Whois\n' email_output_data += '---------\n' email_output_data += AP.whois_output_data + '\n' email_output_data += ' \n' email_output_data += 'Nmap\n' email_output_data += '---------\n' email_output_data += AP.nmap_output_data + '\n' email_output_data += '\n' email_output_data += 'WGet\n' email_output_data += '---------\n' email_output_data += AP.wget_output_data + '\n' email_output_data += '\n' email_output_data += 'WGet SSL\n' email_output_data += '---------\n' email_output_data += AP.wgetSSL_output_data + '\n' email_output_data += 'WGet Tomcat\n' email_output_data += '------------\n' email_output_data += AP.wgetTomcat_output_data + '\n' FI.WriteFile(filename, email_output_data) subproc = subprocess.Popen('ssmtp ' + AP.emailrecp + ' < ' + filename, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for email_data in subproc.stdout.readlines(): if (AP.debug == True): print email_data
def LogFooter(self, logdir, target): FLog = fileio() self.startdatetime = datetime.datetime.now().strftime( "%I:%M%p on %B %d, %Y") filename = logdir + target + '.html' data = '<strong>END OF FILE</strong><br/>' data += '--------------------------------------------------------------------------------' data += '---------------------------------------\n<br/>' data += 'Processed by Static v0.1\n<br/>' data += '--------------------------------------------------------------------------------' data += '---------------------------------------\n<br/>' data += '\n</body>\n</html>\n' FLog.WriteLogFile(filename, data) print '[*] Log file written to: ' + filename return 0
def LogFooter(self, sampleid):
    """Finish the Rafale per-sample HTML log with the footer banner and a
    return-home link. Always returns 0."""
    log_writer = fileio()
    # Side effect kept from the original: refresh startdatetime at footer time.
    self.startdatetime = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
    log_path = self.logdir + sampleid + '.html'
    banner = '--------------------------------------------------------------------------------'
    banner += '---------------------------------------\n<br/>'
    pieces = [
        '<strong>END OF FILE</strong><br/>',
        banner,
        'Processed by Rafale v0.3\n<br/>',
        banner,
        '<a href=\"' + self.home + '\"> Return Home </a>\n</body>\n</html>\n',
    ]
    log_writer.WriteLogFile(log_path, ''.join(pieces))
    return 0
def classifier(filename='', calctest=True, traindir='', testdir='', numfolds=5): """""" outfile = filename+'.csv' # open up file containing the columns we wish to use st = time.time() f = fio.fileio(usePCA=True) cols = ['f274','f727', 'f2', 'f271', 'f527', 'f528'] X = f.loadNumericTrain(usecols=cols) yf = f.loadLabels() y = yf.loss.values print "Training data took %f seconds to load" %(time.time() - st) # Train the gradient boosting classifier clf = GradientBoostingClassifier(**ml.INIT_PARAMS['GradientBoostingClassifier']) st = time.time() y_ = ml.stratKFold(X,y,clf,nFolds=numfolds) fpr, tpr, thresh = roc_curve(y > 0,y_) # Print the scores print "AUC: %f"%auc(fpr,tpr) print "F1 Score: %f"%ml.maxF1(y > 0,y_) print "%d-Fold CV took %f seconds"%(numfolds,time.time() - st) yf['loss'] = y_ yf.to_csv(traindir+outfile,index=False) if calctest: st = time.time() # Load the test data Xtest = f.loadNumericTest(usecols=cols) # Fit the data clf.fit(X,y > 0) sub_ = pd.read_csv('../data/sampleSubmission.csv') sub_.loss = clf.predict_proba(Xtest)[:,1] # Write to file sub_.to_csv(testdir+outfile,index=False) print "Test submission took %s seconds" %(time.time() - st)
def NMap(target, logdir): FI = fileio() filename = logdir + 'NMap.txt' if (AP.debug == True): print 'NMap: target: ' + target #NMap flags: -A Enable OS detection, version detection, script scanning, and traceroute # -sV Probe open ports to determine service/version info subproc = subprocess.Popen('nmap -A -sV '+target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for nmap_data in subproc.stdout.readlines(): AP.nmap_output_data += nmap_data if (AP.debug == True): print nmap_data FI.WriteFile(filename, AP.nmap_output_data)
def LogCreate(self, logdir, target):
    """Create the per-target HTML log with its opening banner and title.

    Records the analysis start timestamp on self.startdatetime and returns 0.
    (Cleanup: the original computed an unused 'logroot' path; removed.)
    """
    FLog = fileio()
    self.startdatetime = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
    filename = logdir + target + '.html'
    data = '<html>\n'
    data += '\n--------------------------------------------------------------------------------'
    data += '---------------------------------------<br/>'
    data += '<head>\n<title>' + filename + '</title>\n'
    data += '\n<strong>Starting Analysis On: </strong><br/>\n' + '\n' + '<strong>Sample: </strong>' + target
    data += ' <strong>Date/Time: </strong>' + self.startdatetime + '<br/>\n'
    data += '--------------------------------------------------------------------------------'
    data += '---------------------------------------<br/>\n</head>\n<body>\n'
    FLog.WriteNewLogFile(filename, data)
    return 0
def GetIPReputation(apikey, logdir): FI = fileio() filename = logdir + 'IPReputation.txt' vt = "http://www.virustotal.com/vtapi/v2/ip-address/report" parameters = {"ip": AP.ip, "apikey": apikey.rstrip('\n')} response = urllib.urlopen('%s?%s' % (vt, urllib.urlencode(parameters))).read() response_dict = json.loads(response) response_dump = json.dumps(json.JSONDecoder().decode(response), sort_keys=True, indent=4) FI.WriteFile(filename, response_dump) if (AP.debug == True): print response_dict return 0
def POE(logdir, target, logging, debug): if (logging == True): LOG = logger() newlogentry = '' whois_dump = '' whois_output_data = '' country_count = 0 output = logdir + 'WhoIs.txt' FI = fileio() print '\r\n[*] Running WhoIs against: ' + target.target subproc = subprocess.Popen('whois ' + target.target, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for whois_data in subproc.stdout.readlines(): whois_output_data += whois_data if (whois_data.find('No match for \"')!= -1): print colored('[x] No WhoIs record available for this domain...', 'red', attrs=['bold']) elif (whois_data.find('connect: Network is unreachable')!= -1): print colored('[x] WhoIs is unable to connect to the network [proxy blocked?] ', 'red', attrs=['bold']) elif (country_count==0): if ((whois_data.find('country')!= -1) or (whois_data.find('Country')!= -1)): target.country = whois_data country_count += 1 if (debug == True): print whois_data try: FI.WriteLogFile(output, whois_output_data) print colored('[*] Country Code: ', 'green', attrs=['bold']) + colored(target.country, 'blue', attrs=['bold']) print colored('[*] WhoIs data had been written to file here: ', 'green') + colored(output, 'blue', attrs=['bold']) if (logging == True): newlogentry = 'WhoIs file has been generated to file here: <a href=\"' + output + '\"> WhoIs Output </a>' LOG.WriteLog(logdir, target.target, newlogentry) newlogentry = '|-----------------> ' + target.country LOG.WriteLog(logdir, target.target, newlogentry) except: print colored('[x] Unable to write whois data to file', 'red', attrs=['bold']) if (logging == True): newlogentry = 'Unable to write whois data to file' LOG.WriteLog(logdir, target.target, newlogentry) return -1 return 0
def Email(target, logdir, line): FI = fileio() filename = logdir + 'Msg.txt' email_output_data = '' email_output_data += 'To: ' + AP.emailrecp + '\n' email_output_data += 'From: ' + AP.emailsend + '\n' if (AP.mode == 'auto'): email_output_data += 'Subject: PYRecon Auto Run - IP: ' + target + '\n' elif (AP.mode == 'fail2ban'): email_output_data += 'Subject: PYRecon Fail2ban Alert - IP: ' + target + '\n' else: email_output_data += 'Subject: PYRecon Manual Run - IP: ' + target + '\n' email_output_data += '\n' email_output_data += 'Hi,\n' email_output_data += '\n' email_output_data += 'PYRecon has reviewed IP: ' + target + '\n' email_output_data += '\n' email_output_data += 'The results are as follows: \n' email_output_data += '\n' email_output_data += 'Event Trigger\n' email_output_data += '---------\n' email_output_data += line email_output_data += '\n' email_output_data += 'Whois\n' email_output_data += '---------\n' email_output_data += AP.whois_output_data + '\n' email_output_data += ' \n' email_output_data += 'Nmap\n' email_output_data += '---------\n' email_output_data += AP.nmap_output_data + '\n' email_output_data += '\n' email_output_data += 'Generated WGet Files\n' email_output_data += '---------\n' for port in LG.http_data: email_output_data += logdir + 'index_port' + str(port) + '.html\n' FI.WriteFile(filename, email_output_data) subproc = subprocess.Popen('/usr/sbin/ssmtp ' + AP.emailrecp + ' < ' + filename, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for email_data in subproc.stdout.readlines(): if (AP.debug == True): print email_data
def LogCreate(self, sampleid, samplename, filename, filetype, filesize):
    """Create the per-sample Rafale HTML log header page.

    Records the analysis start timestamp on self.startdatetime and returns 0.

    Bug fix: the original reassigned the 'filename' parameter to the log
    path before embedding it in the page, so the sample's real filename was
    never displayed (the log path appeared instead). The log path now uses
    its own local so the parameter is shown as intended.
    """
    FLog = fileio()
    self.startdatetime = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
    logpath = self.logdir + sampleid + '.html'
    data = '<html>\n'
    data += '<a href=\"' + self.home + '\"> Return Home </a><br/>\n'
    data += '\n--------------------------------------------------------------------------------'
    data += '---------------------------------------<br/>'
    data += '<head>\n<title>' + sampleid + '</title>\n'
    data += '\n<strong>Starting Analysis On: </strong><br/>\n' + '\n' + '<strong>SampleID: </strong>' + sampleid + ' <strong>Sample: </strong>' + samplename
    data += ' <strong>Date/Time: </strong>' + self.startdatetime + '<br/>\n'
    data += ' <strong>Filename: </strong>' + filename + ' <strong>Filetype: </strong>' + filetype + ' <strong>Filesize: </strong>' + filesize + '<br/>'
    data += '<a href=\"' + self.rafaleroot + 'samples/' + sampleid + '/' + sampleid + '.html' + '\">Download this log file.</a> <br/>'
    data += '--------------------------------------------------------------------------------'
    data += '---------------------------------------<br/>\n</head>\n<body>\n'
    FLog.WriteNewLogFile(logpath, data)
    return 0