def main(self, domain):
    """Brute-force subdomains of *domain* with massdns and record the results.

    :param str domain: registered domain to blast
    :return: subdomains found
    """
    start = time.time()
    logger.log('INFOR', f'Blasting {domain} ')
    massdns_dir = setting.third_party_dir.joinpath('massdns')
    result_dir = setting.result_save_dir
    temp_dir = result_dir.joinpath('temp')
    utils.check_dir(temp_dir)
    massdns_path = utils.get_massdns_path(massdns_dir)
    timestring = utils.get_timestring()
    wildcard_ips = list()  # IPs returned by wildcard (catch-all) DNS records
    wildcard_ttl = int()  # TTL of the wildcard DNS records
    # NOTE(review): queries self.domain while the rest of the method uses the
    # `domain` parameter — confirm the two are always equal for this source.
    ns_list = query_domain_ns(self.domain)
    ns_ip_list = query_domain_ns_a(ns_list)  # A records of the authoritative NS
    self.enable_wildcard = detect_wildcard(domain, ns_ip_list)
    if self.enable_wildcard:
        wildcard_ips, wildcard_ttl = collect_wildcard_record(domain, ns_ip_list)
    ns_path = get_nameservers_path(self.enable_wildcard, ns_ip_list)
    dict_set = self.gen_brute_dict(domain)
    dict_len = len(dict_set)
    dict_name = f'generated_subdomains_{domain}_{timestring}.txt'
    dict_path = temp_dir.joinpath(dict_name)
    save_brute_dict(dict_path, dict_set)
    del dict_set  # the generated dictionary can be huge; reclaim it now
    gc.collect()
    output_name = f'resolved_result_{domain}_{timestring}.json'
    output_path = temp_dir.joinpath(output_name)
    log_path = result_dir.joinpath('massdns.log')
    check_dict()
    logger.log('INFOR', f'Running massdns to brute subdomains')
    utils.call_massdns(massdns_path, dict_path, ns_path,
                       output_path, log_path,
                       quiet_mode=self.quite,
                       process_num=self.process_num,
                       concurrent_num=self.concurrent_num)
    output_paths = []
    if self.process_num == 1:
        output_paths.append(output_path)
    else:
        # In multi-process mode massdns suffixes each worker's output file
        # with the worker index.
        for i in range(self.process_num):
            output_name = f'resolved_result_{domain}_{timestring}.json{i}'
            output_path = temp_dir.joinpath(output_name)
            output_paths.append(output_path)
    ip_times = stat_ip_times(output_paths)
    self.records, self.subdomains = deal_output(output_paths, ip_times,
                                                wildcard_ips, wildcard_ttl)
    delete_file(dict_path, output_paths)
    end = time.time()
    self.elapse = round(end - start, 1)
    logger.log('INFOR', f'{self.source} module takes {self.elapse} seconds, '
                        f'found {len(self.subdomains)} subdomains of {domain}')
    # Fix: the DEBUG log below was a string literal broken mid-token in the
    # collapsed source; reconstructed as two implicitly-concatenated f-strings.
    logger.log('DEBUG', f'{self.source} module '
                        f'found subdomains of {domain}:\n'
                        f'{self.subdomains}')
    self.gen_result(brute=dict_len, valid=len(self.subdomains))
    self.save_db()
    return self.subdomains
def main(self, domain):
    """Run the brute module against *domain* and store what it finds."""
    started_at = time.time()
    logger.log('INFOR', f'Blasting {domain} ')
    tool_dir = settings.third_party_dir.joinpath('massdns')
    save_dir = settings.result_save_dir
    tmp_dir = save_dir.joinpath('temp')
    utils.check_dir(tmp_dir)
    binary_path = utils.get_massdns_path(tool_dir)
    stamp = utils.get_timestring()
    wc_ips, wc_ttl = [], 0  # wildcard-resolution IPs and TTL (filled below)
    authoritative_ns = query_domain_ns(self.domain)
    # A records of the authoritative name servers
    authoritative_ips = query_domain_ns_a(authoritative_ns)
    if self.enable_wildcard is None:
        # Only probe for wildcard resolution when not decided by the caller.
        self.enable_wildcard = wildcard.detect_wildcard(domain)
    if self.enable_wildcard:
        wc_ips, wc_ttl = wildcard.collect_wildcard_record(domain,
                                                          authoritative_ips)
    resolver_path = utils.get_ns_path(self.in_china, self.enable_wildcard,
                                      authoritative_ips)
    words = self.gen_brute_dict(domain)
    words_file = tmp_dir.joinpath(f'generated_subdomains_{domain}_{stamp}.txt')
    save_brute_dict(words_file, words)
    del words  # the generated dictionary can be huge; reclaim it now
    gc.collect()
    resolved_file = tmp_dir.joinpath(f'resolved_result_{domain}_{stamp}.json')
    massdns_log = save_dir.joinpath('massdns.log')
    check_dict()
    logger.log('INFOR', f'Running massdns to brute subdomains')
    utils.call_massdns(binary_path, words_file, resolver_path,
                       resolved_file, massdns_log,
                       quiet_mode=self.quite,
                       concurrent_num=self.concurrent_num)
    appearances = stat_appear_times(resolved_file)
    self.infos, self.subdomains = deal_output(resolved_file, appearances,
                                              wc_ips, wc_ttl)
    delete_file(words_file, resolved_file)
    self.elapse = round(time.time() - started_at, 1)
    logger.log(
        'ALERT', f'{self.source} module takes {self.elapse} seconds, '
                 f'found {len(self.subdomains)} subdomains of {domain}')
    logger.log(
        'DEBUG', f'{self.source} module found subdomains of {domain}: '
                 f'{self.subdomains}')
    self.gen_result()
    self.save_db()
    return self.subdomains
def main(self, domain):
    """Brute-force subdomains of *domain* with massdns and record the results."""
    started_at = time.time()
    logger.log('INFOR', f'正在爆破域名{domain}')
    tool_dir = config.third_party_dir.joinpath('massdns')
    save_dir = config.result_save_dir
    tmp_dir = save_dir.joinpath('temp')
    utils.check_dir(tmp_dir)
    binary_path = utils.get_massdns_path(tool_dir)
    stamp = utils.get_timestring()
    wc_ips, wc_ttl = [], 0  # wildcard-resolution IPs and TTL (filled below)
    authoritative_ns = query_domain_ns(self.domain)
    # A records of the authoritative name servers
    authoritative_ips = query_domain_ns_a(authoritative_ns)
    self.enable_wildcard = detect_wildcard(domain, authoritative_ips)
    if self.enable_wildcard:
        wc_ips, wc_ttl = collect_wildcard_record(domain, authoritative_ips)
    resolver_path = get_nameservers_path(self.enable_wildcard,
                                         authoritative_ips)
    words = self.gen_brute_dict(domain)
    word_count = len(words)
    words_file = tmp_dir.joinpath(f'generated_subdomains_{domain}_{stamp}.txt')
    save_brute_dict(words_file, words)
    del words  # the generated dictionary can be huge; reclaim it now
    gc.collect()
    resolved_file = tmp_dir.joinpath(f'resolved_result_{domain}_{stamp}.json')
    massdns_log = save_dir.joinpath('massdns.log')
    check_dict()
    utils.call_massdns(binary_path, words_file, resolver_path,
                       resolved_file, massdns_log,
                       process_num=self.process_num,
                       concurrent_num=self.concurrent_num)
    ip_counts = stat_ip_times(resolved_file)
    self.records, self.subdomains = deal_output(resolved_file, ip_counts,
                                                wc_ips, wc_ttl)
    delete_file(words_file, resolved_file)
    self.elapse = round(time.time() - started_at, 1)
    logger.log(
        'INFOR', f'{self.source}模块耗时{self.elapse}秒'
                 f'发现{domain}的子域{len(self.subdomains)}个')
    logger.log('DEBUG', f'{self.source}模块发现{domain}的子域:\n'
                        f'{self.subdomains}')
    self.gen_result(brute=word_count, valid=len(self.subdomains))
    self.save_db()
    return self.subdomains
def run_resolve(domain, data):
    """Entry point for resolving collected subdomains.

    :param str domain: main domain the subdomains belong to
    :param list data: subdomain data records to resolve
    :return: the data list updated with resolution results
    :rtype: list
    """
    logger.log('INFOR', f'Start resolving subdomains of {domain}')
    targets = filter_subdomain(data)
    if not targets:
        # Nothing left to resolve; hand back the input untouched.
        return data
    tool_dir = settings.third_party_dir.joinpath('massdns')
    save_dir = settings.result_save_dir
    tmp_dir = save_dir.joinpath('temp')
    utils.check_dir(tmp_dir)
    binary_path = utils.get_massdns_path(tool_dir)
    stamp = utils.get_timestring()
    input_file = tmp_dir.joinpath(f'collected_subdomains_{domain}_{stamp}.txt')
    save_subdomains(input_file, targets)
    del targets  # may be large; free before spawning massdns
    gc.collect()
    output_file = tmp_dir.joinpath(f'resolved_result_{domain}_{stamp}.json')
    massdns_log = save_dir.joinpath('massdns.log')
    resolver_path = settings.brute_nameservers_path
    logger.log('INFOR', f'Running massdns to resolve subdomains')
    utils.call_massdns(binary_path, input_file, resolver_path,
                       output_file, massdns_log, quiet_mode=True)
    infos = deal_output(output_file)
    data = update_data(data, infos)
    logger.log('INFOR', f'Finished resolve subdomains of {domain}')
    return data
def run_resolve(domain, data):
    """Resolve the collected subdomains of *domain* through massdns.

    :param str domain: the main domain to be resolved
    :param list data: list of subdomain data to be parsed
    :return: list of results obtained by parsing
    :rtype: list
    """
    logger.log('INFOR', f'Start resolving subdomains of {domain}')
    pending = filter_subdomain(data)
    if not pending:
        return data  # nothing to resolve
    massdns_home = settings.third_party_dir.joinpath('massdns')
    out_dir = settings.result_save_dir
    scratch_dir = out_dir.joinpath('temp')
    utils.check_dir(scratch_dir)
    massdns_bin = utils.get_massdns_path(massdns_home)
    now_tag = utils.get_timestring()
    listing_path = scratch_dir.joinpath(
        f'collected_subdomains_{domain}_{now_tag}.txt')
    save_subdomains(listing_path, pending)
    del pending  # may be large; free before spawning massdns
    gc.collect()
    result_path = scratch_dir.joinpath(
        f'resolved_result_{domain}_{now_tag}.json')
    massdns_logfile = out_dir.joinpath('massdns.log')
    resolvers = utils.get_ns_path()
    logger.log('INFOR', f'Running massdns to resolve subdomains')
    utils.call_massdns(massdns_bin, listing_path, resolvers,
                       result_path, massdns_logfile, quiet_mode=True)
    infos = deal_output(result_path)
    data = update_data(data, infos)
    logger.log('INFOR', f'Finished resolve subdomains of {domain}')
    return data
def main(args):
    """Train a SAC agent on the drone environment configured by *args*."""
    env = DroneEnv(random=args.env_reset_mode, headless=True, seed=args.seed,
                   reward_function_name=args.reward_function,
                   state=args.state)
    # Use CUDA only when it is both available and explicitly requested.
    use_cuda = torch.cuda.is_available()
    if use_cuda and args.use_cuda:
        device = torch.device("cuda")
    else:
        device = torch.device("cpu")

    # Set save/restore paths.
    save_path = os.path.join('./checkpoint/', args.save_path) + '/'
    restore_path = args.restore_path or save_path
    report_folder = save_path

    # Make sure every directory we will write to exists.
    utils.check_dir(save_path)
    if restore_path:
        utils.check_dir(restore_path)
    utils.check_dir(report_folder)

    # Prepare the progress CSV (header row is only written once).
    progress_csv = os.path.join(report_folder, 'progress.csv')
    if not os.path.isfile(progress_csv):
        print('There is no csv there')
        # Fix: csv.writer requires the file opened with newline='' to avoid
        # blank rows on Windows.
        with open(progress_csv, 'w', newline='') as outcsv:
            writer = csv.writer(outcsv, delimiter=';', quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)
            # NOTE(review): "episode_lenght" is misspelled but kept verbatim —
            # downstream consumers may match this header literally.
            writer.writerow(["Episode", "Total time (s)", "Frame",
                             "Buffer_size", "Mean_Reward", "value_loss",
                             "q_value_loss", "policy_loss", "episode_lenght"])

    # Network and env parameters.
    action_dim = env.action_space.shape[0]
    try:
        state_dim = env.observation_space.shape[0]
    # Fix: narrowed from BaseException, which also swallowed
    # KeyboardInterrupt/SystemExit. Some envs expose the observation size as
    # a plain int rather than a gym-style space; fall back to it directly.
    except (AttributeError, TypeError, IndexError):
        state_dim = env.observation_space
    hidden_dim = args.net_size_value
    # NOTE(review): state_dim and action_range are computed but never passed
    # to SAC below — presumably SAC reads them from env; confirm.
    action_range = [env.agent.action_space.low.min(),
                    env.agent.action_space.high.max()]

    sac = SAC(env=env,
              replay_buffer_size=args.replay_buffer_size,
              hidden_dim=hidden_dim,
              restore_path=restore_path,
              device=device,
              save_path=save_path,
              learning_rate=args.learning_rate,
              max_episodes=args.max_episodes,
              use_double=args.use_double,
              save_interval=args.save_interval)
    sac.train()