def save_json(self):
    """
    Save this module's discovered subdomain results to a JSON file.

    :return: whether the file was written
    """
    if not config.save_module_result:
        return False
    logger.log('TRACE', f'将{self.source}模块发现的子域结果保存为json文件')
    result_dir = config.result_save_dir.joinpath(self.domain, self.module)
    result_dir.mkdir(parents=True, exist_ok=True)
    file_path = result_dir.joinpath(self.source + '.json')
    result = {'domain': self.domain,
              'name': self.module,
              'source': self.source,
              'elapse': self.elapse,
              'find': len(self.subdomains),
              'subdomains': list(self.subdomains),
              'records': self.records}
    with open(file_path, mode='w', encoding='utf-8', errors='ignore') as file:
        json.dump(result, file, ensure_ascii=False, indent=4)
    return True
def export_all(format, path, datas):
    """
    Export all result data into a single file.

    :param str format: export file format
    :param str path: export file path
    :param list datas: result data to export
    """
    format = check_format(format, len(datas))
    timestamp = get_timestring()
    name = f'all_subdomain_result_{timestamp}'
    path = check_path(path, name, format)
    logger.log('INFOR', f'所有主域的子域结果 {path}')
    row_list = list()
    for row in datas:
        # Drop bulky fields that should not be exported; pop with a
        # default avoids the LBYL double lookup of `in` + `pop`.
        row.pop('header', None)
        row.pop('response', None)
        keys = row.keys()
        values = row.values()
        if format in {'xls', 'xlsx'}:
            # Excel formats need the values sanitized first
            values = check_value(values)
        row_list.append(Record(keys, values))
    rows = RecordCollection(iter(row_list))
    content = rows.export(format)
    save_data(path, content)
def save_db(self, table_name, results, module_name=None):
    """
    Store a module's results into the database.

    :param str table_name: table name
    :param list results: list of result rows
    :param str module_name: module name
    """
    logger.log('TRACE', f'正在将{module_name}模块发现{table_name}的子域'
                        '结果存入数据库')
    table_name = table_name.replace('.', '_')
    if not results:
        return
    sql = (f'insert into "{table_name}" ('
           f'id, type, alive, resolve, request, new, url, subdomain,'
           f'port, level, cname, content, public, status, reason,'
           f'title, banner, header, response, times, ttl, resolver,'
           f'module, source, elapse, find, brute, valid) '
           f'values (:id, :type, :alive, :resolve, :request, :new,'
           f':url, :subdomain, :port, :level, :cname, :content,'
           f':public, :status, :reason, :title, :banner, :header,'
           f':response, :times, :ttl, :resolver, :module, :source,'
           f':elapse, :find, :brute, :valid)')
    try:
        self.conn.bulk_query(sql, results)
    except Exception as error:
        logger.log('ERROR', error)
def query(self):
    """
    Query the API for subdomains and match them, following pagination.
    """
    self.header = self.get_header()
    self.proxy = self.get_proxy(self.source)
    data = {
        'query': f'parsed.names: {self.domain}',
        'page': 1,
        'fields': ['parsed.subject_dn', 'parsed.names'],
        'flatten': True
    }
    resp = self.post(self.addr, json=data, auth=(self.id, self.secret))
    if not resp:
        return
    # FIX: do not shadow the module-level `json` import with the
    # parsed response body.
    resp_json = resp.json()
    status = resp_json.get('status')
    if status != 'ok':
        logger.log('ALERT', status)
        return
    subdomains = self.match(self.domain, str(resp_json))
    self.subdomains = self.subdomains.union(subdomains)
    pages = resp_json.get('metadata').get('pages')
    for page in range(2, pages + 1):
        data['page'] = page
        resp = self.post(self.addr, json=data, auth=(self.id, self.secret))
        if not resp:
            return
        subdomains = self.match(self.domain, str(resp.json()))
        self.subdomains = self.subdomains.union(subdomains)
def deal_output(output_path, ip_times, wildcard_ips, wildcard_ttl):
    """
    Parse the resolver output file and collect resolution records.

    :param output_path: path of the resolver's JSON-lines output
    :param ip_times: IP occurrence counts
    :param wildcard_ips: wildcard-resolution IPs
    :param wildcard_ttl: wildcard-resolution TTLs
    :return: (records dict, list of valid subdomains)
    """
    logger.log('INFOR', f'正在处理解析结果')
    records = dict()  # all domain resolution data
    subdomains = list()  # subdomains that passed the validity check
    with open(output_path) as out_file:
        for raw in out_file:
            line = raw.strip()
            try:
                items = json.loads(line)
            except Exception as error:
                logger.log('ERROR', error.args)
                logger.log('ERROR', f'解析行{line}出错跳过解析该行')
                continue
            qname = items.get('name')[:-1]  # strip the trailing dot
            status = items.get('status')
            if status != 'NOERROR':
                logger.log('TRACE', f'处理{line}时发现{qname}查询结果状态{status}')
                continue
            data = items.get('data')
            if 'answers' not in data:
                logger.log('TRACE', f'处理{line}时发现{qname}返回的结果无应答')
                continue
            records, subdomains = gen_records(items, records, subdomains,
                                              ip_times, wildcard_ips,
                                              wildcard_ttl)
    return records, subdomains
def check_path(path, name, format):
    """
    Check and normalize the result output path.

    :param path: save path (str, Path or None)
    :param name: export file name (without suffix)
    :param format: save format (used as file suffix)
    :return: checked save path as a Path
    """
    filename = f'{name}.{format}'
    default_path = config.result_save_dir.joinpath(filename)
    if isinstance(path, str):
        # FIX: normalize Windows separators directly. The previous
        # repr()-based round-trip (repr + strip quotes) also mangled
        # paths containing characters repr escapes, e.g. a tab became
        # the two characters "/t".
        path = path.replace('\\', '/')
    else:
        path = default_path
    path = Path(path)
    if not path.suffix:  # the input is a directory
        path = path.joinpath(filename)
    parent_dir = path.parent
    if not parent_dir.exists():
        logger.log('ALERT', f'不存在{parent_dir}目录将会新建')
        parent_dir.mkdir(parents=True, exist_ok=True)
    if path.exists():
        logger.log('ALERT', f'存在{path}文件将会覆盖')
    return path
def query(self, sql):
    """
    Run a raw SQL query on the underlying connection.

    :param str sql: SQL statement to execute
    :return: query results, or None when the query failed
    """
    try:
        results = self.conn.query(sql)
    except Exception as error:
        logger.log('ERROR', error.args)
        return None
    return results
def query(self):
    """
    Query the interface for subdomains and match them, page by page.
    """
    base_addr = 'http://114.55.181.28/check_web/' \
                'databaseInfo_mainSearch.action'
    page_num = 1
    while True:
        time.sleep(self.delay)
        self.header = self.get_header()
        self.proxy = self.get_proxy(self.source)
        params = {'isSearch': 'true',
                  'searchType': 'url',
                  'term': self.domain,
                  'pageNo': page_num}
        try:
            resp = self.get(base_addr, params)
        except Exception as error:
            logger.log('ERROR', error.args)
            break
        if not resp:
            break
        found = self.match(self.domain, resp.text)
        self.subdomains = self.subdomains.union(found)
        if not found:
            break
        if page_num > 10:  # stop paging after page 11
            break
        page_num += 1
def save(self):
    """Persist the check results to the output file."""
    logger.log('DEBUG', '正在保存检查结果')
    exported = (str(self.results) if self.format == 'txt'
                else self.results.export(self.format))
    utils.save_data(self.path, exported)
def axfr(self, server):
    """
    Attempt a DNS zone transfer (AXFR) against one name server.

    :param server: name server to query
    """
    logger.log('DEBUG', f'尝试对{self.domain}的域名服务器{server}进行域传送')
    try:
        xfr = dns.query.xfr(where=server, zone=self.domain,
                            timeout=5.0, lifetime=10.0)
        zone = dns.zone.from_xfr(xfr)
    except Exception as error:
        logger.log('DEBUG', error.args)
        logger.log('DEBUG', f'对{self.domain}的域名服务器{server}进行域传送失败')
        return
    for name in zone.nodes.keys():
        full_domain = str(name) + '.' + self.domain
        matched = utils.match_subdomain(self.domain, full_domain)
        self.subdomains = self.subdomains.union(matched)
        self.results.append(zone[name].to_text(name))
    if self.results:
        logger.log('DEBUG', f'发现{self.domain}在{server}上的域传送记录')
        logger.log('DEBUG', '\n'.join(self.results))
    self.results = []
def get(self, url, params=None, check=True, **kwargs):
    """
    Customized GET request.

    :param str url: request URL
    :param dict params: query parameters
    :param bool check: whether to validate the response
    :param kwargs: extra keyword arguments passed to requests
    :return: requests response object, or None on failure
    """
    try:
        resp = requests.get(url,
                            params=params,
                            cookies=self.cookie,
                            headers=self.header,
                            proxies=self.proxy,
                            timeout=self.timeout,
                            verify=self.verify,
                            **kwargs)
    except Exception as error:
        logger.log('ERROR', error.args)
        return None
    # When checking is requested, only hand back responses that pass
    # the shared response validation.
    if check and not utils.check_response('GET', resp):
        return None
    return resp
def search(self, full_search=False):
    """
    Search the interface for subdomains and match them, page by page.

    :param bool full_search: keep paging even when a page yields no
                             new subdomains
    """
    page_num = 1
    while True:
        time.sleep(self.delay)
        params = {'pageno': page_num, 'q': self.domain, 'type': 'code'}
        try:
            resp = self.get(self.addr, params=params)
        except Exception as e:
            logger.log('ERROR', e.args)
            break
        if not resp:
            break
        if resp.status_code != 200:
            logger.log('ERROR', f'{self.source}模块搜索出错')
            break
        if 'class="empty-box"' in resp.text:
            break
        soup = BeautifulSoup(resp.text, 'html.parser')
        subdomains = self.match(self.domain, soup.text)
        if not subdomains:
            break
        # BUG FIX: determine whether this page produced anything new
        # BEFORE merging; the old code merged first, so the subset test
        # was always true and the search stopped after the first page.
        has_new = not subdomains.issubset(self.subdomains)
        self.subdomains = self.subdomains.union(subdomains)
        if not full_search and not has_new:
            break
        if '<li class="disabled"><a href="###">' in resp.text:
            break  # last page reached
        if page_num > 100:
            break
        page_num += 1
def drop_table(self, table_name):
    """
    Drop a table.

    :param str table_name: table name
    """
    safe_name = table_name.replace('.', '_')
    logger.log('TRACE', f'正在删除{safe_name}表')
    self.query(f'drop table if exists "{safe_name}"')
def get_data(self, table_name):
    """
    Fetch all rows of a table.

    :param str table_name: table name
    :return: query results
    """
    safe_name = table_name.replace('.', '_')
    logger.log('TRACE', f'获取{safe_name}表中的所有数据')
    return self.query(f'select * from "{safe_name}"')
def get_cname(subdomain):
    """
    Look up the CNAME record of a subdomain.

    :param subdomain: subdomain to resolve
    :return: CNAME target as text, or None on failure / no record
    """
    resolver = utils.dns_resolver()
    try:
        answers = resolver.query(subdomain, 'CNAME')
    except Exception as error:
        logger.log('TRACE', error.args)
        return None
    # A subdomain has only one CNAME record; return the first answer.
    return next((answer.to_text() for answer in answers), None)
def clear_table(self, table_name):
    """
    Delete all rows of a table.

    :param str table_name: table name
    """
    safe_name = table_name.replace('.', '_')
    logger.log('TRACE', f'正在清空{safe_name}表中的数据')
    self.query(f'delete from "{safe_name}"')
def remove_invalid(self, table_name):
    """
    Remove rows with empty or unresolved subdomains from a table.

    :param str table_name: table name
    """
    safe_name = table_name.replace('.', '_')
    logger.log('TRACE', f'正在去除{safe_name}表中的无效子域')
    self.query(f'delete from "{safe_name}" where '
               f'subdomain is null or resolve == 0')
def check(self, *apis):
    """
    Quick check that the API credentials are configured.

    :param apis: tuple of API credential values
    :return: True when every value is present, else False
    """
    if all(apis):
        return True
    logger.log('ALERT', f'{self.source}模块没有配置API跳过执行')
    return False
def deduplicate_subdomain(self, table_name):
    """
    Deduplicate subdomains in a table, keeping the row with the
    smallest id for each subdomain.

    :param str table_name: table name
    """
    safe_name = table_name.replace('.', '_')
    logger.log('TRACE', f'正在去重{safe_name}表中的子域')
    self.query(f'delete from "{safe_name}" where '
               f'id not in (select min(id) '
               f'from "{safe_name}" group by subdomain)')
def compare(self, subdomain, cname, responses):
    """
    Compare the subdomain's and CNAME target's HTTP responses against
    fingerprints to flag a possible subdomain takeover.

    :param subdomain: subdomain to check
    :param cname: its CNAME target
    :param responses: fingerprint strings indicating takeover risk
    """
    domain_resp = self.get('http://' + subdomain, check=False)
    cname_resp = self.get('http://' + cname, check=False)
    if domain_resp is None or cname_resp is None:
        return
    for fingerprint in responses:
        # A fingerprint must appear in BOTH responses to count.
        if fingerprint not in domain_resp.text:
            continue
        if fingerprint not in cname_resp.text:
            continue
        logger.log('ALERT', f'{subdomain}存在子域接管风险')
        self.results.append([subdomain, cname])
        break
def save_db(self):
    """
    Store this module's results into the database.
    """
    logger.log('DEBUG', f'正在将结果存入到数据库')
    # Serialize database writes across module threads.
    lock.acquire()
    database = Database()
    database.create_table(self.domain)
    database.save_db(self.domain, self.results, self.source)
    database.close()
    lock.release()
def rename_table(self, table_name, new_table_name):
    """
    Rename a table.

    :param str table_name: current table name
    :param str new_table_name: new table name
    """
    table_name = table_name.replace('.', '_')
    new_table_name = new_table_name.replace('.', '_')
    # BUG FIX: the log message previously interpolated table_name
    # twice instead of showing the new name.
    logger.log('TRACE', f'正在将{table_name}表重命名为{new_table_name}表')
    self.query(f'alter table "{table_name}" '
               f'rename to "{new_table_name}"')
def save_data(name, data):
    """
    Save resolution results into the database.

    :param str name: table name to save to
    :param list data: data to save
    """
    logger.log('INFOR', f'正在保存解析结果')
    database = Database()
    # Recreate the table so stale results never linger.
    database.drop_table(name)
    database.create_table(name)
    database.save_db(name, data, 'resolve')
    database.close()
def copy_table(self, table_name, bak_table_name):
    """
    Copy a table to create a backup.

    :param str table_name: source table name
    :param str bak_table_name: backup table name
    """
    src_name = table_name.replace('.', '_')
    bak_name = bak_table_name.replace('.', '_')
    logger.log('TRACE', f'正在将{src_name}表复制到{bak_name}新表')
    self.query(f'drop table if exists "{bak_name}"')
    self.query(f'create table "{bak_name}" '
               f'as select * from "{src_name}"')
def filter_subdomain(data):
    """
    Filter out subdomains that have no resolution content.

    :param list data: data list to filter
    :return: list of subdomains still awaiting resolution
    """
    logger.log('DEBUG', f'正在过滤出待解析的子域')
    # FIX: the old loop was `for data in data`, reusing the parameter
    # name for each item; a comprehension avoids that shadowing.
    return [item.get('subdomain') for item in data
            if not item.get('content')]
def check(self):
    """
    Regex-match the Content-Security-Policy response header to
    discover subdomains.
    """
    if not self.csp_header:
        self.csp_header = self.grab_header()
    if not self.csp_header:
        logger.log('DEBUG', f'获取{self.domain}域的请求头失败')
        return
    # BUG FIX: read the CSP from the grabbed response headers
    # (csp_header), not from self.header, which holds the outgoing
    # request header template and never contains a CSP.
    csp = self.csp_header.get('Content-Security-Policy')
    if not csp:
        logger.log('DEBUG', f'{self.domain}域的响应头不存在内容安全策略字段')
        return
    self.subdomains = utils.match_subdomain(self.domain, csp)
def check(self):
    """
    Fetch the domain's TLS certificate and match subdomains inside it.
    """
    try:
        ctx = ssl.create_default_context()
        # FIX: close the socket deterministically; the old code leaked
        # the TLS connection on every call.
        with ctx.wrap_socket(socket.socket(),
                             server_hostname=self.domain) as sock:
            sock.connect((self.domain, self.port))
            cert_dict = sock.getpeercert()
    except Exception as e:
        logger.log('DEBUG', e.args)
        return
    subdomains = utils.match_subdomain(self.domain, str(cert_dict))
    self.subdomains = self.subdomains.union(subdomains)
def check(self):
    """
    Query the domain's NS records and attempt a zone transfer against
    each name server.

    (The previous docstring was copied from the CSP module and
    described the wrong behavior.)
    """
    resolver = utils.dns_resolver()
    try:
        answers = resolver.query(self.domain, "NS")
    except Exception as e:
        logger.log('ERROR', e.args)
        return
    nsservers = [str(answer) for answer in answers]
    if not nsservers:  # idiomatic emptiness test instead of len()
        logger.log('ALERT', f'没有找到{self.domain}的NS域名服务器记录')
        return
    for nsserver in nsservers:
        self.axfr(nsserver)
def update_data(data, records):
    """
    Merge resolution results into the data list in place.

    :param list data: data list to update
    :param dict records: resolution results keyed by subdomain
    :return: the updated data list
    """
    logger.log('DEBUG', f'正在更新解析结果')
    for items in data:
        if items.get('content'):
            continue  # already has resolution content
        subdomain = items.get('subdomain')
        record = records.get(subdomain)
        # BUG FIX: guard against a subdomain missing from records;
        # dict.update(None) raises TypeError.
        if record:
            items.update(record)
        # (the old `data[index] = items` was a no-op: items IS the
        # element, mutated in place)
    return data
def exist_table(self, table_name):
    """
    Check whether a table exists.

    :param str table_name: table name
    :return: True if the table exists, else False
    """
    table_name = table_name.replace('.', '_')
    logger.log('TRACE', f'正在查询是否存在{table_name}表')
    # FIX: SQL string literals use single quotes; double quotes are
    # identifier quoting in SQLite and only matched here through a
    # deprecated compatibility fallback.
    results = self.query(f"select count() from sqlite_master "
                         f"where type = 'table' and "
                         f"name = '{table_name}'")
    return results.scalar() != 0