def CheckFingerprint(SourceData):
    """Search SourceData for any known tool fingerprint.

    Keyword Arguments:
    SourceData -- the data where the signature will be searched

    Reads FINGERPRINT_PATH, where each non-comment line has the form
    ``ToolName=Fingerprint``, and returns the name of the first tool whose
    fingerprint appears in SourceData. Returns "" when nothing matches or
    the fingerprint file cannot be read.
    """
    try:
        File = open(FINGERPRINT_PATH, "r")
    except (IOError, OSError):  # was a bare except: report only real I/O failures
        logger.warning("Can't read " + FINGERPRINT_PATH)
        return ""
    try:
        for line in File:
            line = RemoveComments(line.strip())
            if line == "":
                continue
            # partition instead of split: keeps a fingerprint that itself
            # contains "=" intact, and lets us skip malformed lines instead
            # of crashing with IndexError.
            ToolName, sep, Fingerprint = line.partition("=")
            if sep == "":
                continue
            if Fingerprint in SourceData:
                return ToolName
    finally:
        # close the handle on every exit path
        File.close()
    return ""
def _check_os(self):
    """Detect the host operating system and record the client type.

    Sets ``self._client`` to ``'win'`` when running on Windows, otherwise
    logs a warning (untested platform) and sets it to ``'other'``.
    """
    os_running = platform.system()
    logger.info(f"Current operating system: {os_running}")
    # guard-clause style: handle the untested platforms first
    if os_running != 'Windows':
        logger.warning(
            f"Your system is not tested for this application. [System: {os_running}]."
        )
        self._client = 'other'
        return
    self._client = 'win'
def _get_response(self, url, allow_redirects=None):
    """Perform a GET request, retrying forever on transient network errors.

    url             -- the URL to fetch
    allow_redirects -- when not None, forwarded to requests.get; when None,
                       the requests default is used

    Returns the requests.Response of the first successful attempt.
    """
    while True:
        try:
            # Bug fixes: (1) the redirect branch previously had no timeout
            # and could hang forever; (2) allow_redirects=False used to be
            # ignored because the old check was truthiness-based.
            if allow_redirects is not None:
                return requests.get(url, allow_redirects=allow_redirects,
                                    timeout=10)
            return requests.get(url, timeout=10)
        except requests.ConnectionError:
            logger.warning("Connection error. Trying again...")
        except requests.exceptions.ReadTimeout:
            logger.warning("Connection timeout error. Trying again...")
def _sqli_bypass(http, s=None):
    """
    Performs a SQLi attack for creating a tautology.

    http -- connection details passed to _run_payload_request
    s    -- the session used for the payload requests (added with a default:
            the original body referenced an undefined name ``s`` and raised
            NameError on every call)
    """
    logger.info("Perform a SQLi bypass attack")
    for _payload in sqli_bypass_payloads:
        r = _run_payload_request(http, s, _payload)
        # in the response we should have xss
        # bug fix: was the misspelled _check_reponse (NameError)
        if _check_response(r):
            infoMsg = "Reflected XSS successful, the intruder gains honest's cookies"
            logger.info(infoMsg)
            intruder_session.cookies.update(honest_session.cookies)
            # break out of the for if XSS was successful
            break
        # bug fix: was the undefined name xss_payload
        infoMsg = "Unsuccessful payload {}".format(_payload)
        logger.info(infoMsg)
    else:
        # for/else: only warn when no payload succeeded (loop never broke)
        warningMsg = "No SQLi bypass payload was successful"
        logger.warning(warningMsg)
def _xss_response(http, s):
    """
    Performs a normal request and looks in the response for an XSS code.

    Aborts the whole run (exit(-1)) when the expected XSS payload is not
    reflected in the response; on success the intruder session inherits
    the honest session's cookies.
    """
    # after executing the request
    # there is an XSS in the response
    logger.info("Perform a request and check for xss in the response")
    r = _normal_request(http)
    # in the response we should have xss
    if _check_response(r):
        infoMsg = "Reflected XSS successful, the intruder gains honest's cookies"
        logger.info(infoMsg)
        intruder_session.cookies.update(honest_session.cookies)
    else:
        # abort execution, we were expecting XSS in the response
        # bug fix: warning message had a typo ("exxpected")
        warningMsg = "No XSS in the response as expected"
        logger.warning(warningMsg)
        exit(-1)
def _xss_reflected(http, s):
    """
    Performs a Reflected XSS attack.
    Multiple requests are performed until a successful XSS is executed.
    """
    logger.info("Perform a reflected XSS and check for XSS in the response")
    for _payload in xss_payloads:
        r = _run_payload_request(http, s, _payload)
        # in the response we should have xss
        if _check_response(r, tocheck="something"):
            # bug fix: message said "SQLi successful" — copy-paste from the
            # SQLi routine; this function performs a reflected XSS attack
            infoMsg = "Reflected XSS successful"
            logger.info(infoMsg)
            # break out of the for if XSS was successful
            break
        infoMsg = "Unsuccessful payload {}".format(_payload)
        logger.info(infoMsg)
    else:
        # for/else: only warn when no payload succeeded (loop never broke);
        # message also fixed ("SQLi bypass" copy-paste) to refer to XSS
        warningMsg = "No XSS payload was successful"
        logger.warning(warningMsg)
def sendemail(self, strData):
    """Send strData as an HTML e-mail notification with the embedded web logo.

    strData -- HTML body of the notification

    Silently returns (after an info log) when notifications are disabled or
    the SMTP server/port/recipients are not configured. Authentication
    failures and other SMTP errors are logged, never raised.
    """
    if not self.Enabled:
        logger.info("E-mail notification is disabled.")
        return
    if self.SMTP_IP == "":
        logger.info("No SMTP server address configured.")
        return
    if self.SMTP_PORT == "":
        logger.info("SMTP server port is not configured.")
        return
    if self.Recipients == "":
        logger.info("No recipient address is configured.")
        return
    msg = MIMEMultipart()
    msg['To'] = self.To_header
    msg['From'] = self.From
    msg['Subject'] = self.Subject
    msgText = MIMEText(strData, "html")
    msg.attach(msgText)
    # Read the logo
    try:
        # context manager replaces the manual open/close; narrow except
        # replaces the old bare except
        with open(ARTEMISA_WEBLOGO_PATH, 'rb') as fp:
            msgImage = MIMEImage(fp.read())
    except (IOError, OSError):
        logger.error("Cannot read file " + ARTEMISA_WEBLOGO_PATH)
        return
    # Define the image's ID as referenced above
    msgImage.add_header('Content-ID', '<weblogo>')
    msg.attach(msgImage)
    try:
        # the TLS and plain branches only differed in starttls(); merged
        server = SMTP(self.SMTP_IP, int(self.SMTP_PORT))
        server.ehlo()
        if self.TSLSSL:
            server.starttls()
            server.ehlo()
        server.login(self.SMTP_USERNAME, self.SMTP_PASSWORD)
        server.sendmail(self.From, self.Recipients.split(","),
                        msg.as_string())
        server.quit()
        logger.info("E-mail notification sent.")
    except SMTPAuthenticationError:
        logger.warning(
            "E-mail account username and/or password refused by SMTP server."
        )
    except Exception as e:
        # bug fix: "except Exception, e" is Python-2-only syntax and is a
        # SyntaxError on Python 3 (which this file's f-strings require)
        logger.error("E-mail notification couldn't be sent. Error: " + str(e))
def execute_sqlmap(sqlmap_details):
    """Drive a full scan through the sqlmapapi server.

    sqlmap_details -- dict describing the attack: "url", "method",
        "vuln_param", optional "get_params"/"post_params" (maps of
        abstract key -> (name, value) pairs), optional "cookies",
        "extract", "dumpall", "read", "write"/"path", "secondOrder".

    Blocks (polling every 5s) until sqlmap reports termination.
    Returns (sqlmap_data, sqlmap_log); aborts the process when the server
    is down, a file to upload is missing, or sqlmap reports an error.
    """
    global data_to_extract
    logger.info("run sqlmapapi.py")
    is_sqlmap_up = sqlmap.run_api_server()
    if not is_sqlmap_up:
        logger.critical("sqlmap server not running")
        exit()
    task = sqlmap.new_task()
    # configure proxy
    if config.proxy_ip and config.proxy_port and config.proxy_port.isdigit():
        sqlmap.set_option("proxy",
                          "http://{}:{}".format(config.proxy_ip, config.proxy_port),
                          task)
    url = sqlmap_details["url"]
    method = sqlmap_details["method"]
    vuln_param = sqlmap_details["vuln_param"]
    url_params = ""
    if "get_params" in sqlmap_details:
        params = sqlmap_details["get_params"]
        url_params = ""
        for ab_k, real_c in params.items():
            # specify an injection point with a trailing "*"
            if ab_k == vuln_param:
                url_params = url_params + real_c[0] + "=" + real_c[1] + "*&"
            else:
                url_params = url_params + real_c[0] + "=" + real_c[1] + "&"
        url_params = url_params[:-1]
    body_params = ""
    if "post_params" in sqlmap_details:
        params = sqlmap_details["post_params"]
        body_params = ""
        for ab_k, real_c in params.items():
            # specify an injection point with a trailing "*"
            if ab_k == vuln_param:
                body_params = body_params + real_c[0] + "=" + real_c[1] + "*&"
            else:
                body_params = body_params + real_c[0] + "=" + real_c[1] + "&"
        body_params = body_params[:-1]
    # NOTE(review): "?" is appended even when there are no GET params,
    # matching the original behavior
    url = url + "?" + url_params
    sqlmap.set_option("url", url, task)
    if method == "POST":
        sqlmap.set_option("data", body_params, task)
    # set cookie if present and should be considered
    if "cookies" in sqlmap_details:
        c = ""
        for k, v in sqlmap_details["cookies"].items():
            c = c + k + "=" + v + ";"
        debugMsg = "sqlmap with cookie {}".format(c)
        logger.debug(debugMsg)
        sqlmap.set_option("cookie", c, task)
    # BEGIN: set specific attack details
    # data extraction
    if "extract" in sqlmap_details:
        data_to_extract = sqlmap_details["extract"]
        sqlmap.set_option("dumpTable", "true", task)
        # set data extraction only if we have data to extract
        cols = []
        tbl = ""
        for tblcol in data_to_extract:
            tbl_list = tblcol.split(".")
            # TODO: in here we're basically overwriting the table name
            # whenever we find a new one
            tbl = tbl_list[0]
            cols.append(tbl_list[1])
        sqlmap.set_option("tbl", tbl, task)
        # bug fix: column names were concatenated with no separator
        # ("col1col2"); sqlmap expects a comma-separated list
        sqlmap.set_option("col", ",".join(cols), task)
    if "dumpall" in sqlmap_details:
        # dump the entire database
        sqlmap.set_option("dumpAll", "true", task)
    # file read
    if "read" in sqlmap_details:
        file_to_extract = sqlmap_details["read"]
        sqlmap.set_option("rFile", file_to_extract, task)
    # file write
    if "write" in sqlmap_details:
        file_to_write = sqlmap_details["write"]
        remote_path = sqlmap_details["path"]
        if not isfile(file_to_write):
            criticalMsg = "Error: {} file not found".format(file_to_write)
            # bug fix: was debug.critical — "debug" is undefined (NameError)
            logger.critical(criticalMsg)
            exit()
        sqlmap.set_option("wFile", join(".", file_to_write), task)
        sqlmap.set_option("dFile", remote_path, task)
    # second order
    if "secondOrder" in sqlmap_details:
        secondOrder_url = sqlmap_details["secondOrder"]
        sqlmap.set_option("secondOrder", secondOrder_url, task)
    # END: set specific attack details
    logger.info("sqlmap analysis started")
    sqlmap.start_scan(url, task)
    stopFlag = threading.Event()
    sqlmap_data = None
    sqlmap_log = None
    # poll the task every 5 seconds until sqlmap reports termination
    while not stopFlag.wait(5):
        r = sqlmap.get_status(task)
        if "terminated" in r:
            logger.debug(sqlmap.get_log(task))
            sqlmap_data = sqlmap.get_data(task)
            sqlmap_log = sqlmap.get_log(task)
            stopFlag.set()
        else:
            logger.debug(sqlmap.get_log(task))
            logger.info("sqlmap analysis in progress ... ")
    # we check if the last message generated by sqlmap is critical
    # or an error
    level = sqlmap_log[-1]["level"]
    message = sqlmap_log[-1]["message"]
    if level == "WARNING":
        logger.warning(message)
    if level == "INFO":
        logger.info(message)
    if level == "ERROR" or level == "CRITICAL":
        logger.critical("sqlmap generated an error")
        logger.critical(message)
        logger.critical("Aborting execution")
        exit()
    logger.info("sqlmap analysis terminated")
    return sqlmap_data, sqlmap_log
def CheckDNS(strIP, verbose):
    """Analyse a host name with a DNS lookup (dig) and a WHOIS search.

    Keyword Arguments:
    strIP   -- an IP address or a host name; plain IP addresses are not
               analysed and yield the string "not DNS"
    verbose -- when true, the raw tool output is prepended to the report

    Returns 0 for an empty input, -1 when dig or whois is not installed,
    otherwise a report string describing the resolved IP and whether
    WHOIS data was found.
    """
    if strIP == "":
        return 0
    DataToSend = ""
    # Check if strIP is an IP or a host name
    bDNS = False
    try:
        IP(strIP)
    except:
        # IP() rejected the value, so treat it as a DNS host name
        bDNS = True
    if not bDNS:
        # It's an IP
        # If the address passed is an IP address we will not analyze it with reverse techniques (by now).
        # TODO: Future implementations my consider this.
        return "not DNS"
        #try:
        #    Command = "dig -x " + strIP + " +short"
        #    Process = Popen(Command, shell=True, stdout=PIPE)
        #    Process.wait()
        #    Data = Process.communicate()[0].strip().split("\n")
        #
        #    if verbose:
        #        DataToSend = "+ Verbose" + "\n"
        #        DataToSend = DataToSend + "| Tool employed: " + Command + "\n"
        #        DataToSend = DataToSend + "|" + "\n"
        #
        #        DataToSend = DataToSend + "| Tool output:" + "\n"
        #        for line in Data:
        #            DataToSend = DataToSend + "| " + line + "\n"
        #        DataToSend = DataToSend + "\n"
        #
        #    strIP = Data[0]
        #
        #    if strIP == "":
        #        return DataToSend + "Domain name resolved: none"
        #    else:
        #        return DataToSend + "Domain name resolved: " + strIP
        #
        #except OSError:
        #    print "WARNING dig command is not installed."
        #    return -1
    else:
        # The DNS analysis consists of a DNS lookup and a WHOIS search.
        try:
            Command = "dig " + strIP + " A +noall +answer +short"
            Process = Popen(Command, shell=True, stdout=PIPE)
            Process.wait()
            Data = Process.communicate()[0].strip().split("\n")
            # dig +short may print CNAMEs first; the resolved address is
            # the last line of the output
            IPResolved = Data[len(Data) - 1]
            if verbose:
                DataToSend = "+ Verbose" + "\n"
                DataToSend = DataToSend + "| Tool employed: " + Command + "\n"
                DataToSend = DataToSend + "|" + "\n"
                DataToSend = DataToSend + "| Tool output:" + "\n"
                for line in Data:
                    DataToSend = DataToSend + "| " + line + "\n"
                DataToSend = DataToSend + "\n"
        except OSError:
            logger.warning("dig command is not installed.")
            return -1
        # Try to use the whois command. If it fails, perhaps the command is not installed.
        try:
            # Store the whois' return in a variable.
            Command = "whois " + strIP
            Process = Popen(Command, shell=True, stdout=PIPE)
            Process.wait()
            Data = Process.communicate()[0]
            if verbose:
                DataToSend = DataToSend + "+ Verbose" + "\n"
                DataToSend = DataToSend + "| Tool employed: " + Command + "\n"
                DataToSend = DataToSend + "|" + "\n"
                DataToSend = DataToSend + "| Tool output: -too large to show here-" + "\n"
                DataToSend = DataToSend + "\n"
            # TODO: this parsing is weak and could be improved.
            if Data.find("NOT FOUND") != -1 or Data.find(
                    "No match for domain") != -1:
                WhoisDataFound = False
            else:
                WhoisDataFound = True
        except OSError:
            logger.warning("whois is not installed.")
            return -1
        if IPResolved == "":
            return DataToSend + "IP resolved: none"
        else:
            if WhoisDataFound:
                return DataToSend + "IP resolved: " + IPResolved + "\n" + "WHOIS data found."
            else:
                return DataToSend + "IP resolved: " + IPResolved + "\n" + "WHOIS data not found."
def generate_msc(file_attack_trace, file_aslan_model):
    """Run the connector jar on an attack trace and build the abstract MSC.

    file_attack_trace -- path to the model checker's attack-trace output
    file_aslan_model  -- path to the ASLan model

    Returns the MSC as a string ("" when no attack was found). Aborts the
    process when the connector does not finish within 10 seconds.
    """
    r_tested = re.compile("TESTED (.*)")
    r_reached = re.compile("REACHED (.*)")
    r_analyze = re.compile("ANALYSE (.*)")
    r_unused = re.compile("UNUSED: { (.*)")
    r_goal = re.compile("GOAL: (.*)")
    r_end_unused = re.compile("(.*) }")
    unused_flag = 0
    p1 = subprocess.Popen(
        ["java", "-jar", connector, "-ar", file_attack_trace,
         file_aslan_model],
        universal_newlines=True,
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE)
    try:
        out, err = p1.communicate(timeout=10)
    except subprocess.TimeoutExpired:
        p1.kill()
        logger.critical("MSC creation timed out")
        exit()
    if config.verbosity:
        # print the generated output on a file
        with open("tmp_msc.txt", "w") as msc_verbose:
            msc_verbose.write(out)
    msc = ""
    comments = False
    # bug fix: the file was never closed on the early return below;
    # the context manager guarantees it
    with open(file_attack_trace) as f:
        for line in f.readlines():
            line = line.strip()
            if "SUMMARY ATTACK_FOUND" in line:
                # we found an attack, so return the generated MSC
                i = out.find("MESSAGES:")
                msc = out[i + 9:]
                logger.info("Abstract Attack Trace found:")
                print(msc)
            elif "SUMMARY NO_ATTACK_FOUND" in line:
                # no attack found, we don't need the MSC
                logger.warning("NO ATTACK FOUND")
            else:
                goal = r_goal.search(line)
                if goal:
                    infoMsg = "GOAL: {}".format(goal.group(1))
                    logger.info(infoMsg)
                    continue
                if "COMMENTS" in line:
                    comments = True
                    logger.info("COMMENTS")
                    continue
                if "STATISTICS TIME" in line:
                    comments = False
                    continue
                if comments:
                    print(line)
                    continue
                tested = r_tested.search(line)
                if tested:
                    infoMsg = "TESTED: {}".format(tested.group(1))
                    logger.info(infoMsg)
                    continue
                reached = r_reached.search(line)
                if reached:
                    infoMsg = "REACHED: {}".format(reached.group(1))
                    logger.info(infoMsg)
                    continue
                analyze = r_analyze.search(line)
                if analyze:
                    infoMsg = "ANALYZE: {}".format(analyze.group(1))
                    logger.info(infoMsg)
                    # I return here because if I reached ANALYZE, I don't
                    # care of reading the remaining part of the output
                    return msc
                unused = r_unused.search(line)
                if unused:
                    logger.debug("UNUSED:")
                    logger.debug(unused.group(1))
                    unused_flag = 1
                    continue
                last_line_unused = r_end_unused.search(line)
                if unused_flag == 1 and last_line_unused:
                    # last line of the unused
                    logger.debug(last_line_unused.group(1))
                    unused_flag = 0
                    continue
                elif unused_flag == 1:
                    # keep reading next lines of the UNUSED block
                    logger.debug(line)
                    continue
    # this return is for safety reason. Theoretically it should always
    # return when ANALYZE is found
    return msc