def WriteAssets(dns_list, task_name):
    """Persist subdomain scan results into the SrcAssets table.

    Args:
        dns_list: iterable of dicts; each is expected to carry 'content'
            (IP string, possibly comma-separated when a host resolves to
            multiple IPs), 'url', 'status', 'subdomain', 'title', 'banner'.
            -- schema inferred from the keys read below; confirm at the caller.
        task_name: scan-task name stored as asset_name on each new row.

    Side effects: inserts rows via the module-level SQLAlchemy `session`;
    commits per insert and rolls back on failure.
    """
    for info in dns_list:
        ip = info.get('content', '')
        try:
            # BUG FIX: the original used `if ip.find(','):` — str.find returns
            # -1 (truthy) when there is no comma and 0 (falsy) for a leading
            # comma, so the test was effectively inverted. Membership test is
            # the correct check; keep only the first IP of a multi-IP answer.
            if ',' in ip:
                ip = ip.split(',')[0]
        except Exception as e:
            print(f'[-]子域名多IP获取失败:{e}')
        host = info.get('url', None)
        status = info.get('status', None)
        # Only insert fully-populated records.
        if ip and host and status:
            asset_count = session.query(SrcAssets).filter(SrcAssets.asset_host == host).count()
            session.commit()
            if not asset_count:  # skip hosts already present
                area = SelectIP(ip)          # geo/area lookup for the IP
                flag, waf = Check_Waf(host)  # WAF detection; flag unused here
                srcasset_sql = SrcAssets(asset_name=task_name, asset_host=host,
                                         asset_subdomain=info.get('subdomain'),
                                         asset_title=info.get('title'), asset_ip=ip,
                                         asset_area=area, asset_waf=waf, asset_cdn=False,
                                         asset_banner=info.get('banner'), asset_info='',
                                         asset_whois='')
                session.add(srcasset_sql)
                try:
                    session.commit()
                except Exception as error:
                    session.rollback()
                    print(f'[-]子域名入库异常{error}')
    print(f'[+]子域名[{task_name}]入库完成')
def WriteAsset(http_info, asset_name):
    """Insert one URL-probe result into SrcAssets unless the host already exists.

    `http_info` supplies host/subdomain/title/ip/area/waf/banner fields;
    `asset_name` is the owning task name. Commits via the module-level session
    and rolls back on failure.
    """
    already_present = (
        session.query(SrcAssets)
        .filter(SrcAssets.asset_host == http_info['host'])
        .count()
    )
    if already_present:
        return  # dedupe: host was inserted by an earlier scan

    new_asset = SrcAssets(
        asset_name=asset_name,
        asset_host=http_info['host'],
        asset_subdomain=http_info['subdomain'],
        asset_title=http_info['title'],
        asset_ip=http_info['ip'],
        asset_area=http_info['area'],
        asset_waf=http_info['waf'],
        asset_cdn=False,
        asset_banner=http_info['banner'],
        asset_info='',
        asset_whois='',
    )
    session.add(new_asset)
    try:
        session.commit()
    except Exception as error:
        session.rollback()
        print(f'[-]Url探测-子域名入库异常{error}')
def ReadAssets():
    """Claim one asset that Xray has not scanned yet.

    Fetches the first row with asset_xray_flag == False, marks it True so no
    other worker picks it up, and returns the row (or None when nothing is
    pending). On commit failure the flag change is rolled back.
    """
    claimed = session.query(SrcAssets).filter(
        SrcAssets.asset_xray_flag == False).first()
    session.commit()
    if claimed:
        claimed.asset_xray_flag = True
        session.add(claimed)
        try:
            session.commit()
        except Exception as error:
            print(f'[-]Xray扫描-修改扫描状态异常{error}')
            session.rollback()
        else:
            # Re-read the flag from the DB so the returned object is current.
            session.refresh(claimed, ['asset_xray_flag'])
    return claimed
def WriteTask(dns_list, task_name):
    """Queue crawler-discovered subdomains into the SrcTask table.

    Subdomains already present in SrcAssets are skipped; each new task row is
    committed individually so one failure does not drop the whole batch.
    """
    if dns_list:
        for dns in dns_list:
            known = session.query(SrcAssets).filter(
                SrcAssets.asset_subdomain == dns).count()
            session.commit()
            if known:
                # Subdomain already tracked as an asset — do not re-queue it.
                continue
            session.add(SrcTask(task_name=task_name, task_domain=dns))
            try:
                session.commit()
            except Exception as e:
                session.rollback()
                print('ALERT', '爬虫子域名入库异常:%s' % e)
    print('[+]爬虫子域名入库完成')
def WritePosts(port_dict, assets_sql):
    """Store port-scan findings for one asset into SrcPorts.

    `port_dict` maps a key to a dict with 'port', 'name', 'product' and
    'version' entries (nmap-style service info — confirm at the scanner);
    `assets_sql` is the SrcAssets row the ports belong to. Each row is
    committed on its own and rolled back individually on failure.
    """
    for detail in port_dict.values():
        record = SrcPorts(
            port_name=assets_sql.asset_name,
            port_host=assets_sql.asset_host,
            port_ip=assets_sql.asset_ip,
            port_port=detail['port'],
            port_service=detail['name'],
            port_product=detail['product'],
            port_version=detail['version'],
        )
        session.add(record)
        try:
            session.commit()
        except Exception as error:
            session.rollback()
            print(f'[-]端口入库异常{error}')
    print(f'[+]端口[{assets_sql.asset_ip}]入库完成')
def ReadAssets():
    """Claim one asset pending port scanning.

    Picks the first row with asset_port_flag == False, then marks EVERY asset
    sharing its IP as scanned (one port scan covers all hosts on that IP) and
    returns the originally selected row, or None when nothing is pending.
    """
    candidate = session.query(SrcAssets).filter(
        SrcAssets.asset_port_flag == False).first()
    session.commit()
    if candidate:
        same_ip_rows = session.query(SrcAssets).filter(
            SrcAssets.asset_ip == candidate.asset_ip).all()
        for row in same_ip_rows:
            row.asset_port_flag = True
            session.add(row)
        try:
            session.commit()
        except Exception as error:
            print(f'[-]端口扫描-修改IP扫描状态异常{error}')
            session.rollback()
    return candidate