class ClassifierSignature:
    """Identify product names and versions from HTTP responses by matching
    regex signatures stored in per-category signature files."""

    def __init__(self):
        # Directory of this module; signature files live in ./signature/.
        self.full_path = os.path.dirname(os.path.abspath(__file__))
        self.signature_path = os.path.join(self.full_path, 'signature')
        self.util = Utilty()

    # Identify product name.
    def identify_product(self, categoy, response):
        """Return a list of 'product@version' strings found in *response*.

        categoy  -- signature-category suffix ('os', 'web', 'framework',
                    'cms'); parameter name kept as-is (original typo) for
                    caller compatibility.
        response -- HTTP header + body text to scan.

        Signature file line format: product@fixed_version@regex
        Errors (missing file, malformed line, bad regex) are reported via
        the utility logger and an empty/partial list is returned.
        """
        prod_info_list = []
        file_name = 'signature_' + categoy + '.txt'
        try:
            # Judge product using pattern matching.
            with codecs.open(os.path.join(self.signature_path, file_name), 'r', 'utf-8') as fin:
                matching_patterns = fin.readlines()
            for pattern in matching_patterns:
                items = pattern.replace('\r', '').replace('\n', '').split('@')
                product = items[0].lower()
                signature = items[2]
                list_match = re.findall(signature, response, flags=re.IGNORECASE)
                if len(list_match) != 0:
                    # Check version: first match that yields a version wins.
                    version_info = ''
                    for target_string in list_match:
                        version_info = self.extract_version(target_string).lower()
                        if version_info != '':
                            break
                    # Add product name and version. A fixed version in the
                    # signature file takes precedence over the extracted one.
                    if str(items[1]) != '':
                        prod_info_list.append(product + '@' + str(items[1]))
                    elif version_info != '':
                        prod_info_list.append(product + '@' + version_info)
                    else:
                        prod_info_list.append(product + '@-')
        except Exception as err:
            self.util.print_message(WARNING, '{}'.format(err))
        return prod_info_list

    # Extract version.
    def extract_version(self, target_string):
        r"""Extract a version-number substring from *target_string*.

        Patterns are ordered most-specific first. BUG FIX: the original
        ordering placed the bare r'(\d{1,3}).*' pattern before the
        letter-bearing patterns (e.g. '5.6a12', '2.4.3b', '1.x'), which
        made those patterns unreachable dead code.
        Returns '' when no version-like substring is found.
        """
        regex_list = [
            r'(\d{1,3}\.\d{1,3}\.\d[a-z]{1,3}).*',  # e.g. 2.4.3b
            r'(\d{1,3}\.\d{1,3}\.\d{1,3}).*',       # e.g. 2.4.41
            r'(\d{1,3}\.\d{1,3}[a-z]\d{1,3}).*',    # e.g. 5.6a12
            r'(\d{1,3}\.\d{1,3}).*',                # e.g. 1.19
            r'(\d\.[xX|\*]).*',                     # e.g. 1.x / 1.*
            r'(\d{1,3}).*',                         # bare major version
        ]
        version_info = ''
        for regex_pattern in regex_list:
            version_list = re.findall(regex_pattern, target_string)
            if len(version_list) != 0:
                version_info = str(version_list[0])
                break
        return version_info

    # Classifier product name using signatures.
    def classifier_signature(self, target_info, client):
        """Fetch each target URL and return de-duplicated
        'product@port@path' entries matched via signature files.

        target_info -- iterable of targets where target[2] is a URL list.
        client      -- object exposing keep_alive() (session keeper).

        Network failures are logged and matching proceeds on whatever
        response text (possibly empty) was collected.
        """
        product_list = []
        for target in target_info:
            for target_url in target[2]:
                # Get HTTP response (header + body).
                response = ''
                http = urllib3.PoolManager(timeout=self.util.http_timeout)
                try:
                    client.keep_alive()
                    self.util.print_message(OK, '{} {}'.format(
                        self.util.get_current_date('%Y-%m-%d %H:%M:%S'), target_url))
                    res = http.request('GET', target_url)
                    for header in res.headers.items():
                        response += header[0] + ': ' + header[1] + '\r\n'
                    response += '\r\n\r\n' + res.data.decode('utf-8')
                except Exception as err:
                    self.util.print_message(WARNING, '{}'.format(err))

                # BUG FIX: was 'util.parse_url(...)' (undefined module-level
                # name -> NameError); use the Utilty instance on self.
                # Hoisted out of the per-product loop (loop-invariant).
                parsed = self.util.parse_url(target_url)
                path_item = os.path.split(parsed.path)
                for category in ['os', 'web', 'framework', 'cms']:
                    prod_info = self.identify_product(
                        category, self.util.delete_ctrl_char(response))
                    for product in prod_info:
                        if path_item[0].endswith('/') is False:
                            product_list.append(
                                product + '@' + str(parsed.port) + '@' + path_item[0] + '/')
                        else:
                            product_list.append(
                                product + '@' + str(parsed.port) + '@' + path_item[0])
                # Be polite to the target host between URLs.
                time.sleep(1.0)

        # Delete duplication: keep one entry per (product, path) pair.
        uniq_product = []
        tmp_list = []
        for item in list(set(product_list)):
            tmp_item = item.split('@')
            tmp = tmp_item[0] + tmp_item[2]
            if tmp not in tmp_list:
                tmp_list.append(tmp)
                uniq_product.append(item)
        return uniq_product
defence_method = args.attack_data_poisoning elif args.defence_type == 'model_poisoning': defence_method = args.attack_model_poisoning elif args.defence_type == 'evasion': defence_method = args.attack_evasion elif args.defence_type == 'exfiltration': defence_method = args.attack_exfiltration # Insert values to Common table. utility.insert_new_scan_record( args.target_id, args.scan_id, 'Scanning', args.model_name, args.train_data_name, args.use_x_train_num, args.train_label_name, args.test_data_name, args.use_x_test_num, args.test_label_name, args.op_type, args.attack_type, attack_method, args.defence_type, defence_method, utility.get_current_date(), args.lang) # Accuracy on Benign Examples. ret_status, acc_benign = utility.evaluate(classifier, X_test=X_test, y_test=y_test) if ret_status is False: utility.write_log( 20, '[Out] Adversarial Threat Detector [{}].'.format(file_name)) sys.exit(0) else: utility.print_message( OK, 'Accuracy on Benign Examples : {}%'.format(acc_benign * 100)) report_util.template_target['accuracy'] = '{}%'.format(acc_benign *
test_url = protocol_list[idx] + '://' + fqdn_list[idx] + path_list[ idx] else: test_url = protocol_list[idx] + '://' + fqdn_list[ idx] + ':' + port_list[idx] + path_list[idx] _, server_header, res_header, res_body, encoding = utility.send_request( 'GET', test_url) # Check cloud service. cloud_type = 'Unknown' if opt_cloud: cloud_type = cloud_checker.get_cloud_service(fqdn_list[idx]) # Search Censys. if opt_censys: date = utility.get_current_date('%Y%m%d%H%M%S%f')[:-3] print_date = utility.transform_date_string( utility.transform_date_object(date[:-3], '%Y%m%d%H%M%S')) server_info, cert_info = censys.search_censys( utility.forward_lookup(fqdn_list[idx]), fqdn_list[idx]) report.create_censys_report(fqdn_list[idx], port_list[idx], server_info, cert_info, print_date) # Analysis HTTP responses. product_list = [] if opt_log: # Check stored logs. if os.path.exists(opt_log_path) is False: utility.print_message( FAIL, 'Path not found: {}'.format(opt_log_path)) utility.write_log(30,
# Check target url. parsed = None try: parsed = util.parse_url(target_url) except Exception as err: utility.print_exception( err, 'Parsed error: {}'.format(target_url)) continue # Get HTTP response (header + body). response = '' http = urllib3.PoolManager(timeout=utility.http_timeout) try: utility.print_message( OK, '{} {}'.format( utility.get_current_date('%Y-%m-%d %H:%M:%S'), target_url)) res = http.request('GET', target_url) for header in res.headers.items(): response += header[0] + ': ' + header[1] + '\r\n' response += '\r\n\r\n' + res.data.decode('utf-8') # Write log. with codecs.open(log_file, 'w', 'utf-8') as fout: fout.write(response) except Exception as err: utility.print_exception( err, 'Target URL: {}'.format(target_url)) continue # Judge product name using string matching.