Example #1
 def __init__(self,xml_command):
     try:
         for attr in ['name','cmd_line']:
             setattr(self,attr,str(xml_command.getElementsByTagName(attr)[0].childNodes[0].toxml()))
     except IndexError:
         logger.log("Attribute not found: "+attr)
         sys.exit(0)
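A minimal sketch of the XML node this constructor expects, assuming commands are defined with <name> and <cmd_line> child elements and parsed with xml.dom.minidom (element names inferred from the attribute list above; the sample command is hypothetical):

from xml.dom import minidom

doc = minidom.parseString(
    '<command><name>uptime</name><cmd_line>uptime -p</cmd_line></command>')
xml_command = doc.documentElement
# Same lookup the constructor performs for each attribute
name = xml_command.getElementsByTagName('name')[0].childNodes[0].toxml()
cmd_line = xml_command.getElementsByTagName('cmd_line')[0].childNodes[0].toxml()
print(name, cmd_line)  # uptime uptime -p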
Example #2
    def send_command(self,command):
        logger.log("Executing: "+command.cmd_line)

        self.connection.buffer = ''
        self.connection.logfile_read = sys.stdout
        self.connection.sendline(command.cmd_line)
        self.connection.expect(self.expected, timeout=None)
Example #3
 def interact(self):
     self.interactive = True
     try:
         self.connection.sendline('reset')
         self.connection.interact()
         sys.exit(0)
     except Exception as e:
         logger.log(str(e))
         logger.log("Error switching to interactive mode")
         sys.exit(1)
Example #4
 def __init__(self,xml_server):
     for attr in ['user','password','host','port','name','protocol','parent','prompt']:
         try:
             setattr(self,attr,xml_server.getElementsByTagName(attr)[0].childNodes[0].data)
         except IndexError:
             if attr == 'parent':
                 self.parent = None
             elif attr == 'prompt':
                 self.prompt = self.user+'@'+self.host
             else:
                 logger.log("Attribute not found: "+attr)
                 sys.exit(0)
     proto = protocol.Protocol(self.protocol)
     proto.initialize(self)
     self.protocol = proto
Example #5
 def establish(self):
     self.connection = pexpect.spawn(LOCAL_SHELL)
     self.connection.delaybeforesend = 0.5
             
     for host in self.conn_chain:
         logger.log('Connecting to: '+host.name)
         self.expected = host.protocol.expected
         self.expected.append(pexpect.EOF)
         self.expected.append(pexpect.TIMEOUT)
         initial_state = host.protocol.get_initial_state()
         self.connection.sendline(initial_state.send)
         self.connection.setecho(False)
         received = self.connection.expect(self.expected)
         sendline = '$$$'
         while sendline:
             sendline = host.protocol.get_sendline(self.expected[received])
             if sendline:
                 self.connection.sendline(sendline)
                 self.connection.setecho(False)
                 received = self.connection.expect(self.expected)               
Example #6
def main():
    parser = argparse.ArgumentParser(prog='sponge.py')
    parser.add_argument('-s','--server', required=True, help='server to connect to (defined in servers.xml)')
    parser.add_argument('-c','--command', help='command to execute (defined in commands.xml)')
    
    
    args = parser.parse_args()
    sponge_dir = os.path.join(os.getenv('HOME'),DEFAULT_DIR)
    conf = config.Config(sponge_dir)
    serverchain = conf.get_serverschain(args.server)
    command = None
    if args.command:
        command = conf.get_command(args.command)
    if args.command and not command:
        logger.log('Error: Command not found. Exiting')
        sys.exit()
    conn = connection.Connection(serverchain)
    conn.establish()
    if not args.command:
        conn.interact()
    else:
        conn.send_command(command)
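A quick, hypothetical look at the argument parsing above (the server and command names would have to exist in the user's servers.xml and commands.xml):

import argparse

parser = argparse.ArgumentParser(prog='sponge.py')
parser.add_argument('-s', '--server', required=True, help='server to connect to')
parser.add_argument('-c', '--command', help='command to execute')

# Without -c the tool drops into interactive mode; with -c it runs one command.
args = parser.parse_args(['-s', 'jumphost'])
print(args.server, args.command)  # jumphost None
args = parser.parse_args(['-s', 'jumphost', '-c', 'uptime'])
print(args.server, args.command)  # jumphost uptime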
Example #7
 def send(self, urls, data=None, **kwargs):
     """send request to url.If response 200,return response, else return None."""
     allow_redirects = False
     is_logger = urls["is_logger"]
     error_data = {"code": 99999, "message": u"重试次数达到上限"}
     self.setHeadersReferer(urls["Referer"])
     if data:
         method = "post"
         self.setHeaders({"Content-Length": "{0}".format(len(data))})
     else:
         method = "get"
         self.resetHeaders()
     if is_logger:
         logger.log(
             u"url: {0}\n入参: {1}\n请求方式: {2}\n".format(urls["req_url"],data,method,))
     self.setHeadersHost(urls["Host"])
     if self.cdn:
         url_host = self.cdn
     else:
         url_host = urls["Host"]
     for i in range(urls["re_try"]):
         try:
             # sleep(urls["s_time"]) if "s_time" in urls else sleep(0.001)
             sleep(urls.get("s_time", 0.001))
             requests.packages.urllib3.disable_warnings()
             response = self._s.request(method=method,
                                        timeout=2,
                                        url="https://" + url_host + urls["req_url"],
                                        data=data,
                                        allow_redirects=allow_redirects,
                                        verify=False,
                                        **kwargs)
             if response.status_code == 200:
                 if response.content:
                     if is_logger:
                         logger.log(
                             u"出参:{0}".format(response.content))
                     return json.loads(response.content) if method == "post" else response.content
                 else:
                     logger.log(
                         u"url: {} 返回参数为空".format(urls["req_url"]))
                     return error_data
             else:
                 sleep(urls["re_time"])
         except (requests.exceptions.Timeout, requests.exceptions.ReadTimeout, requests.exceptions.ConnectionError):
             pass
         except socket.error:
             pass
     return error_data
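A hypothetical `urls` descriptor assembled only from the keys this send() actually reads; the host, path and values are placeholders, and `session` stands for an instance of the surrounding class:

urls = {
    "req_url": "/api/v1/status",        # path appended after the host
    "Host": "api.example.com",          # target host (self.cdn is used instead when set)
    "Referer": "https://api.example.com/",
    "is_logger": True,                  # log request and response details
    "re_try": 3,                        # number of attempts
    "s_time": 0.1,                      # sleep before each attempt, in seconds
    "re_time": 1,                       # sleep after a non-200 response
}
# resp = session.send(urls)               # GET
# resp = session.send(urls, data=b"a=1")  # POST, JSON-decoded on success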
Example #8
data = data_loaders[C.data](
    dataset_location=C.data_load,
    save_path=C.data_save,
    save_name=C.data,
    force_reprocess=C.force_reprocess,
    smallize=C.smallize,
)

train_data, test_data = data["train"], data["test"]

if C.valid_size > 0:
    C.valid_size = min(C.valid_size, len(data["train"]) // 10)
    data["valid"] = data["train"][:C.valid_size]
    data["train"] = data["train"][C.valid_size:]

logger.log("Data load done.")
logger.log("train size = %d , vali size = %d test size = %d" %
           (len(data["train"]), len(data["valid"]), len(data["test"])))
#---------------------------------------------------------------------------------------------------
#Get model

models = {
    "transformer": MD_Transformer,
    "resnet": ResNet,
    "3p-resnet": ResNet_56_3p,
}
model = models[C.model]
net = model(num_class=10,
            input_size=[32, 32],
            **{x: C.__dict__[x]
               for x in model.choose_kwargs()})
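The dispatch above relies on each model class exposing a choose_kwargs() hook that names the config fields to forward to its constructor; a toy stub illustrating that assumed contract (ToyModel and its dropout field are illustrative only):

class ToyModel:
    def __init__(self, num_class, input_size, dropout=0.1):
        self.num_class = num_class
        self.input_size = input_size
        self.dropout = dropout

    @classmethod
    def choose_kwargs(cls):
        # Names of config attributes to pull out of C and pass to __init__
        return ["dropout"]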
Example #9
    def run(self, rx_queue=None):
        self.domains = utils.get_domains(self.target)
        while self.domains:
            self.domain = self.domains.pop()
            start = time.time()
            db = Database()
            db.create_table(self.domain)
            if not rx_queue:
                rx_queue = queue.Queue()
            logger.log('INFOR', f'开始执行{self.source}模块爆破域名{self.domain}')
            logger.log('INFOR', f'使用{self.process}进程乘{self.coroutine}协程')
            # Fuzz mode does not use recursive brute forcing
            if self.recursive_brute and not self.fuzz:
                logger.log('INFOR', f'开始递归爆破{self.domain}的第1层子域')
            loop = asyncio.get_event_loop()
            asyncio.set_event_loop(loop)
            loop.run_until_complete(self.main(self.domain, rx_queue))

            # Recursively brute force the next level of subdomains
            # Fuzz mode does not use recursive brute forcing
            if self.recursive_brute and not self.fuzz:
                for layer_num in range(1, self.recursive_depth):
                    # Level-1 brute forcing is already done, so the actual recursion depth is layer + 1
                    logger.log('INFOR', f'开始递归爆破{self.domain}的'
                               f'第{layer_num + 1}层子域')
                    for subdomain in self.subdomains.copy():
                        # Condition that gates brute forcing the next level of subdomains
                        if subdomain.count('.') - self.domain.count('.') \
                                == layer_num:
                            loop.run_until_complete(
                                self.main(subdomain, rx_queue))
            # Drain the queue and save the results to the database
            while not rx_queue.empty():
                results = rx_queue.get()
                # Save the results into the database
                db.save_db(self.domain, results, self.source)

            end = time.time()
            self.elapsed = round(end - start, 1)
            logger.log('INFOR', f'结束执行{self.source}模块爆破域名{self.domain}')
            length = len(self.subdomains)
            logger.log(
                'INFOR', f'{self.source}模块耗时{self.elapsed}秒'
                f'发现{self.domain}的域名{length}个')
            logger.log(
                'DEBUG', f'{self.source}模块发现{self.domain}的域名:\n'
                f'{self.subdomains}')
            if not self.path:
                name = f'{self.domain}_brute.{self.format}'
                self.path = config.result_save_path.joinpath(name)
            # Export from the database
            if self.export:
                dbexport.export(self.domain,
                                valid=self.valid,
                                path=self.path,
                                format=self.format,
                                show=self.show)
Example #10
from model.resnet import Model as ResNet
from model.third_party import ResNet_56_3p
from config import C, logger
from dataloader_cifar10 import n_crop_test as n_crop_test_cifar_10

import pdb

#---------------------------------------------------------------------------------------------------
#Get data
data_loaders = {
    "cifar-10": n_crop_test_cifar_10,
}

test_data = data_loaders[C.data](dataset_location=C.data_path, n_crop=C.n_crop)

logger.log("Data load done.")
logger.log("test size = %d" % (len(test_data)))
#---------------------------------------------------------------------------------------------------
#Get model

with open(os.path.join(C.model_path, C.model_save), "rb") as fil:
    net = pickle.load(fil).cuda(C.gpus[0])

logger.log("Load network done.")
#---------------------------------------------------------------------------------------------------
#fastNLP Test

if C.n_crop <= 1:
    fastNLP_data = DataSet()
    for s, lab in test_data:
        fastNLP_data.append(Instance(s=s[0], label=lab))
Example #11
def do_query_a(domain, resolver):
    try:
        answer = resolver.query(domain, 'A')
    # If querying the random domain's A record raises Timeout, probe again
    except Timeout as e:
        logger.log('ALERT', f'探测超时重新探测中')
        logger.log('DEBUG', e.args)
        raise tenacity.TryAgain
    # If the query raises NXDOMAIN or a similar exception, the random subdomain
    # has no A record, which means wildcard resolution is not enabled
    except (NXDOMAIN, YXDOMAIN, NoAnswer, NoNameservers) as e:
        logger.log('DEBUG', e.args)
        logger.log('INFOR', f'{domain}没有使用泛解析')
        return False
    except Exception as e:
        logger.log('ALERT', f'探测{domain}是否使用泛解析出错')
        logger.log('FATAL', e.args)
        exit(1)
    else:
        if answer.rrset is None:
            logger.log('ALERT', f'结果无记录重新探测中')
            raise tenacity.TryAgain
        ttl = answer.ttl
        name = answer.name
        ips = {item.address for item in answer}
        logger.log('ALERT', f'{domain}使用了泛解析')
        logger.log('ALERT', f'{domain} 解析到域名: {name} ' f'IP: {ips} TTL: {ttl}')
        return True
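Raising tenacity.TryAgain only has an effect when the caller is wrapped in a tenacity retry; a minimal sketch of such a caller, assuming dnspython's Resolver (the wrapper name and retry limit are illustrative):

import dns.resolver
import tenacity

@tenacity.retry(stop=tenacity.stop_after_attempt(3))
def probe_wildcard(domain):
    resolver = dns.resolver.Resolver()
    resolver.lifetime = 5  # overall query timeout in seconds
    return do_query_a(domain, resolver)

# probe_wildcard('abc123xyz.example.com')  # True if wildcard DNS is enabled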
Example #12
 def begin(self):
     """
     Output the module start information
     """
     logger.log('DEBUG', f'开始执行{self.source}模块收集{self.domain}的子域')
Example #13
 def send(self, urls, data=None, **kwargs):
     """send request to url.If response 200,return response, else return None."""
     # r_proxy = self.random_proxy()  # get a random proxy IP
     allow_redirects = False
     is_logger = urls["is_logger"]
     error_data = {"code": 99999, "message": u"重试次数达到上限"}
     self.setHeadersReferer(urls["Referer"])
     if data:
         method = "post"
         # self.setHeaders({"Content-Length": "{0}".format(len(data))})
     else:
         method = "get"
         self.resetHeaders()
     if "is_multipart_data" in urls and urls["is_multipart_data"]:
         data = MultipartEncoder(data)
         self.setHeaders({"Content-Type": data.content_type})
         self.setHeaders(urls.get("headers", {}))
     else:
         self.setHeaders(urls.get("headers", {}))
         # self.setHeaders({"Content-Type": "application/json"})
     if is_logger:
         logger.log(u"url: {0}\n入参: {1}\n请求方式: {2}\n".format(
             urls["req_url"],
             data,
             method,
         ))
     if self.cdn:
         url_host = self.cdn
     else:
         url_host = urls["Host"]
     for i in range(urls["re_try"]):
         try:
             # sleep(urls["s_time"]) if "s_time" in urls else sleep(0.001)
             sleep(urls.get("s_time", 0.001))
             requests.packages.urllib3.disable_warnings()
             response = self._s.request(
                 method=method,
                 timeout=2,
                 url="https://" + url_host + urls["req_url"],
                 data=data,
                 allow_redirects=allow_redirects,
                 verify=False,
                 # proxies=r_proxy,
                 **kwargs)
             if response.status_code == 200 or response.status_code == 201:
                 if response.content:
                     if is_logger:
                         logger.log(u"出参:{0}".format(response.content))
                     return json.loads(
                         response.content
                     ) if urls["is_json"] else response.content
                 else:
                     logger.log(u"url: {} 返回参数为空".format(urls["req_url"]))
                     return error_data
             elif response.status_code == 403:
                 logger.log("ip 被封,{}".format(response.content))
                 sleep(60 * 60 * 2)  # ip被封,自动休眠一小时
             else:
                 sleep(urls["re_time"])
         except (requests.exceptions.Timeout,
                 requests.exceptions.ReadTimeout,
                 requests.exceptions.ConnectionError):
             pass
         except socket.error:
             pass
         except KeyError:
             pass
     return error_data
Example #14
def check_net():
    logger.log('INFOR', '正在检查网络环境')
    url = 'http://www.example.com/'
    logger.log('INFOR', f'访问地址 {url}')
    try:
        rsp = requests.get(url)
    except Exception as e:
        logger.log('ERROR', e.args)
        logger.log('ALERT', '访问外网出错 重新检查中')
        raise tenacity.TryAgain
    if rsp.status_code != 200:
        logger.log(
            'ALERT', f'{rsp.request.method} {rsp.request.url} '
            f'{rsp.status_code} {rsp.reason}')
        logger.log('ALERT', '不能正常访问外网 重新检查中')
        raise tenacity.TryAgain
    logger.log('INFOR', '能正常访问外网')
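As in the resolver examples, tenacity.TryAgain only triggers a retry when check_net is decorated or wrapped; a hedged sketch of that wiring (attempt count and wait time are assumptions):

import tenacity

# Retry the check up to 5 times, waiting 2 seconds between attempts.
wrapped_check_net = tenacity.retry(
    stop=tenacity.stop_after_attempt(5),
    wait=tenacity.wait_fixed(2))(check_net)

# wrapped_check_net()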
Example #15
def save_subdomains(save_path, subdomain_list):
    logger.log('DEBUG', f'正在保存待解析的子域')
    subdomain_data = '\n'.join(subdomain_list)
    if not utils.save_data(save_path, subdomain_data):
        logger.log('FATAL', '保存待解析的子域出错')
        exit(1)
Example #16
 def send(self, urls, data=None, **kwargs):
     """send request to url.If response 200,return response, else return None."""
     allow_redirects = False
     is_logger = urls.get("is_logger", False)
     req_url = urls.get("req_url", "")
     re_try = urls.get("re_try", 0)
     s_time = urls.get("s_time", 0)
     http = urls.get("http", "") or "https"
     error_data = {"code": 99999, "message": u"重试次数达到上限"}
     if data:
         method = "post"
         self.setHeaders({"Content-Length": "{0}".format(len(data))})
     else:
         method = "get"
         self.resetHeaders()
     self.setHeadersReferer(urls["Referer"])
     if is_logger:
         logger.log(u"url: {0}\n入参: {1}\n请求方式: {2}\n".format(
             req_url,
             data,
             method,
         ))
     self.setHeadersHost(urls["Host"])
     if self.cdn:
         url_host = self.cdn
     else:
         url_host = urls["Host"]
     for i in range(re_try):
         try:
             # sleep(urls["s_time"]) if "s_time" in urls else sleep(0.001)
             sleep(s_time)
             requests.packages.urllib3.disable_warnings()
             response = self._s.request(method=method,
                                        timeout=3,
                                        proxies=self.proxies,
                                        url=http + "://" + url_host +
                                        req_url,
                                        data=data,
                                        allow_redirects=allow_redirects,
                                        verify=False,
                                        **kwargs)
             if response.status_code == 200:
                 if response.content:
                     if is_logger:
                         logger.log(u"出参:{0}".format(
                             response.content.decode()))
                     return json.loads(response.content.decode(
                     )) if urls["is_json"] else response.content
                 else:
                     logger.log(u"url: {} 返回参数为空".format(urls["req_url"]))
                     return error_data
             elif response.status_code == 403:
                 print(f"当前http请求异常,状态码为{response.status_code}, 休息一会儿")
                 time.sleep(5)
             else:
                 print(f"当前http请求异常,状态码为{response.status_code}")
                 sleep(urls["re_time"])
         except (requests.exceptions.Timeout,
                 requests.exceptions.ReadTimeout,
                 requests.exceptions.ConnectionError, ConnectionResetError,
                 urllib3.exceptions.ProtocolError, TimeoutError,
                 urllib3.exceptions.NewConnectionError) as e:
             print(f"当前代理连接异常,异常ip:{self.proxies}")
         except socket.error as e:
             print(e)
     return error_data
Example #17
 def send(self, urls, data=None, **kwargs):
     """send request to url.If response 200,return response, else return None."""
     allow_redirects = False
     is_logger = urls.get("is_logger", False)
     req_url = urls.get("req_url", "")
     re_try = urls.get("re_try", 0)
     s_time = urls.get("s_time", 0)
     is_cdn = urls.get("is_cdn", False)
     is_test_cdn = urls.get("is_test_cdn", False)
     error_data = {"code": 99999, "message": u"重试次数达到上限"}
     if data:
         method = "post"
         self.setHeaders({"Content-Length": "{0}".format(len(data))})
     else:
         method = "get"
         self.resetHeaders()
     self.setHeadersReferer(urls["Referer"])
     if is_logger:
         logger.log(
             u"url: {0}\n入参: {1}\n请求方式: {2}\n".format(req_url, data, method, ))
     self.setHeadersHost(urls["Host"])
     if is_test_cdn:
         url_host = self._cdn
     elif is_cdn:
         if self._cdn:
             # print(u"当前请求cdn为{}".format(self._cdn))
             url_host = self._cdn
         else:
             url_host = urls["Host"]
     else:
         url_host = urls["Host"]
     for i in range(re_try):
         try:
             # sleep(urls["s_time"]) if "s_time" in urls else sleep(0.001)
             sleep(s_time)
             try:
                 requests.packages.urllib3.disable_warnings()
             except:
                 pass
             response = self._s.request(method=method,
                                        timeout=2,
                                        proxies=self._proxies,
                                        url="https://" + url_host + req_url,
                                        data=data,
                                        allow_redirects=allow_redirects,
                                        verify=False,
                                        **kwargs)
             if response.status_code == 200 or response.status_code == 302:
                 if urls.get("not_decode", False):
                     return response.content
                 if response.content:
                     if is_logger:
                         logger.log(
                             u"出参:{0}".format(response.content))
                     if urls["is_json"]:
                         return json.loads(response.content.decode() if isinstance(response.content, bytes) else response.content)
                     else:
                         return response.content.decode("utf8", "ignore") if isinstance(response.content, bytes) else response.content
                 else:
                     logger.log(
                         u"url: {} 返回参数为空".format(urls["req_url"]))
                     return error_data
             else:
                 sleep(urls["re_time"])
         except (requests.exceptions.Timeout, requests.exceptions.ReadTimeout, requests.exceptions.ConnectionError):
             pass
         except socket.error:
             pass
     return error_data
Example #18
def check_net():
    logger.log('INFOR', '正在检查网络环境')
    urls = [
        'http://www.example.com', 'http://www.baidu.com',
        'http://www.bing.com', 'http://www.taobao.com',
        'http://www.linkedin.com', 'http://www.msn.com',
        'http://www.apple.com', 'http://microsoft.com'
    ]
    url = random.choice(urls)
    logger.log('INFOR', f'正在尝试访问 {url}')
    try:
        rsp = requests.get(url)
    except Exception as e:
        logger.log('ERROR', e.args)
        logger.log('ALERT', '访问外网出错 重新检查中')
        raise tenacity.TryAgain
    if rsp.status_code != 200:
        logger.log(
            'ALERT', f'{rsp.request.method} {rsp.request.url} '
            f'{rsp.status_code} {rsp.reason}')
        logger.log('ALERT', '不能正常访问外网 重新检查中')
        raise tenacity.TryAgain
    logger.log('INFOR', '能正常访问外网')
Example #19
def deal_output(output_paths, ip_times, wildcard_ips, wildcard_ttl):
    logger.log('INFOR', f'正在处理解析结果')
    records = dict()  # all domain resolution data
    subdomains = list()  # all subdomains that passed the validity check
    for output_path in output_paths:
        logger.log('DEBUG', f'正在处理{output_path}')
        with open(output_path) as fd:
            for line in fd:
                line = line.strip()
                try:
                    items = json.loads(line)
                except Exception as e:
                    logger.log('ERROR', e.args)
                    logger.log('ERROR', f'解析行{line}出错跳过解析该行')
                    continue
                qname = items.get('name')[:-1]  # strip the trailing '.'
                status = items.get('status')
                if status != 'NOERROR':
                    logger.log('TRACE', f'处理{line}时发现{qname}查询结果状态{status}')
                    continue
                data = items.get('data')
                if 'answers' not in data:
                    logger.log('TRACE', f'处理{line}时发现{qname}返回的结果无应答')
                    continue
                records, subdomains = gen_records(items, records, subdomains,
                                                  ip_times, wildcard_ips,
                                                  wildcard_ttl)
    return records, subdomains
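For reference, a hypothetical resolver output line carrying just the fields this parser reads (a name with a trailing dot, a status, and data.answers); real massdns output contains additional fields:

import json

line = ('{"name": "www.example.com.", "status": "NOERROR", '
        '"data": {"answers": [{"type": "A", "name": "www.example.com.", '
        '"ttl": 300, "data": "93.184.216.34"}]}}')
items = json.loads(line)
print(items['name'][:-1], items['status'])  # www.example.com NOERROR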
Example #20
def export(table, db=None, valid=None, path=None, format='xlsx', show=False):
    """
    OneForAll数据库导出模块

    Example:
        python3 dbexport.py --table name --format csv --path= ./result.csv
        python3 dbexport.py --db result.db --table name --show False

    Note:
        参数port可选值有'small', 'medium', 'large', 'xlarge',详见config.py配置
        参数format可选格式有'csv', 'tsv', 'json', 'yaml', 'html', 'xls', 'xlsx',
                         'dbf', 'latex', 'ods'
        参数path为None会根据format参数和域名名称在项目结果目录生成相应文件

    :param str table:   要导出的表
    :param str db:      要导出的数据库路径(默认为results/result.sqlite3)
    :param int valid:   导出子域的有效性(默认None)
    :param str format:  导出格式(默认xlsx)
    :param str path:    导出路径(默认None)
    :param bool show:   终端显示导出数据(默认False)
    """
    formats = [
        'csv', 'tsv', 'json', 'yaml', 'html', 'latex', 'xls', 'xlsx', 'dbf',
        'ods'
    ]
    if format not in formats:
        logger.log('FATAL', f'不支持{format}格式导出')
        return
    database = Database(db)
    if valid is None:
        rows = database.get_data(table)
    elif isinstance(valid, int):
        rows = database.get_subdomain(table, valid)
    else:
        rows = database.get_data(table)  # unexpected value, export all subdomains
    if show:
        print(rows.dataset)
    if not path:
        path = 'export.' + format
    logger.log('INFOR', f'正在将数据库中{table}表导出')
    data = rows.export(format)
    try:
        with open(path, 'w') as file:
            file.write(data)
            logger.log('INFOR', '成功完成导出')
            logger.log('INFOR', path)
    except TypeError:
        with open(path, 'wb') as file:
            file.write(data)
            logger.log('INFOR', '成功完成导出')
            logger.log('INFOR', path)
    except Exception as e:
        logger.log('ERROR', e)
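Hypothetical invocations of this export helper (the table name is a placeholder; in OneForAll the table is named after the collected domain):

# Export one table to CSV at an explicit path and echo the rows to the terminal
export('example.com', format='csv', path='./result.csv', show=True)

# Export only subdomains marked valid, using the defaults (xlsx, ./export.xlsx)
export('example.com', valid=1)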
Example #21
    def main(self, domain):
        start = time.time()
        logger.log('INFOR', f'正在爆破域名{domain}')
        massdns_dir = config.third_party_dir.joinpath('massdns')
        result_dir = config.result_save_dir
        temp_dir = result_dir.joinpath('temp')
        utils.check_dir(temp_dir)
        massdns_path = get_massdns_path(massdns_dir)
        timestring = utils.get_timestring()

        wildcard_ips = list()  # IPs returned by wildcard resolution
        wildcard_ttl = int()  # TTL of the wildcard resolution record
        ns_list = query_domain_ns(self.domain)
        ns_ip_list = query_domain_ns_a(ns_list)  # A records of the authoritative DNS name servers
        self.enable_wildcard = detect_wildcard(domain, ns_ip_list)

        if self.enable_wildcard:
            wildcard_ips, wildcard_ttl = collect_wildcard_record(
                domain, ns_ip_list)
        ns_path = get_nameservers_path(self.enable_wildcard, ns_ip_list)

        dict_set = self.gen_brute_dict(domain)
        dict_len = len(dict_set)
        dict_data = '\n'.join(dict_set)
        del dict_set
        gc.collect()

        dict_name = f'generated_subdomains_{domain}_{timestring}.txt'
        dict_path = temp_dir.joinpath(dict_name)
        save_brute_dict(dict_path, dict_data)
        del dict_data
        gc.collect()

        output_name = f'resolved_result_{domain}_{timestring}.json'
        output_path = temp_dir.joinpath(output_name)
        log_path = result_dir.joinpath('massdns.log')
        check_dict()

        logger.log('INFOR', f'开始执行massdns')
        do_brute(massdns_path,
                 dict_path,
                 ns_path,
                 output_path,
                 log_path,
                 process_num=self.process_num,
                 concurrent_num=self.concurrent_num)
        logger.log('INFOR', f'结束执行massdns')

        ip_times = stat_ip_times(output_path)
        self.records, self.subdomains = deal_result(output_path, ip_times,
                                                    wildcard_ips, wildcard_ttl)
        delete_file(dict_path, output_path)
        end = time.time()
        self.elapse = round(end - start, 1)
        logger.log(
            'INFOR', f'{self.source}模块耗时{self.elapse}秒'
            f'发现{domain}的子域{len(self.subdomains)}个')
        logger.log('DEBUG', f'{self.source}模块发现{domain}的子域:\n'
                   f'{self.subdomains}')
        self.gen_result(brute=dict_len, valid=len(self.subdomains))
        self.save_db()
        return self.subdomains
Example #22
    def main(self):
        if self.brute is None:
            self.brute = config.enable_brute_module
        if self.dns is None:
            self.dns = config.enable_dns_resolve
        if self.req is None:
            self.req = config.enable_http_request
        old_table = self.domain + '_last'
        new_table = self.domain + '_now'
        collect = Collect(self.domain, export=False)
        collect.run()
        if self.brute:
            # Brute forcing issues a large number of DNS queries; running it concurrently may break network requests in other tasks
            brute = AIOBrute(self.domain, export=False)
            brute.run()

        db = Database()
        db.copy_table(self.domain, self.domain + '_ori')
        db.remove_invalid(self.domain)
        db.deduplicate_subdomain(self.domain)

        old_data = []
        # Database preprocessing when this is not the first collection run
        if db.exist_table(new_table):
            db.drop_table(old_table)  # drop the previous results table if it exists
            db.rename_table(new_table, old_table)  # rename the new table to the old table
            old_data = db.get_data(old_table).as_dict()

        # Export results directly without resolving subdomains
        if not self.dns:
            # Export from the database
            self.valid = None
            dbexport.export(self.domain,
                            valid=self.valid,
                            format=self.format,
                            show=self.show)
            db.drop_table(new_table)
            db.rename_table(self.domain, new_table)
            db.close()
            return

        self.data = db.get_data(self.domain).as_dict()

        # Mark newly discovered subdomains
        self.data = utils.mark_subdomain(old_data, self.data)

        loop = asyncio.get_event_loop()
        asyncio.set_event_loop(loop)

        # Resolve the subdomains
        task = resolve.bulk_query_a(self.data)
        self.data = loop.run_until_complete(task)

        # Save the resolution results
        resolve_table = self.domain + '_res'
        db.drop_table(resolve_table)
        db.create_table(resolve_table)
        db.save_db(resolve_table, self.data, 'resolve')

        # Export results directly without requesting the subdomains
        if not self.req:
            # Export from the database
            self.valid = None
            dbexport.export(self.domain,
                            valid=self.valid,
                            format=self.format,
                            show=self.show)
            db.drop_table(new_table)
            db.rename_table(self.domain, new_table)
            db.close()
            return

        # Request the subdomains over HTTP
        task = request.bulk_get_request(self.data, self.port)
        self.data = loop.run_until_complete(task)
        self.datas.extend(self.data)
        # Add a short delay before closing the event loop so underlying connections have time to close
        loop.run_until_complete(asyncio.sleep(0.25))
        valid_count = len(
            list(filter(lambda item: item.get('valid') == 1, self.data)))
        logger.log('INFOR', f'经验证{self.domain}有效子域{valid_count}个')

        # Save the request results
        db.clear_table(self.domain)
        db.save_db(self.domain, self.data, 'request')

        # Export from the database
        dbexport.export(self.domain,
                        valid=self.valid,
                        format=self.format,
                        show=self.show)
        db.drop_table(new_table)
        db.rename_table(self.domain, new_table)
        db.close()

        # Subdomain takeover check
        if self.takeover:
            subdomains = set(map(lambda x: x.get('subdomain'), self.data))
            takeover = Takeover(subdomains)
            takeover.run()
Example #23
    def run(self, rx_queue=None):
        self.domains = utils.get_domains(self.target)
        while self.domains:
            self.domain = self.domains.pop()
            start = time.time()
            db_conn = database.connect_db()
            table_name = self.domain.replace('.', '_')
            database.create_table(db_conn, table_name)
            if not rx_queue:
                rx_queue = queue.Queue()
            logger.log('INFOR', f'开始执行{self.source}模块爆破域名{self.domain}')
            logger.log(
                'INFOR',
                f'{self.source}模块使用{self.processes}个进程乘{self.coroutine}个协程')
            # logger.log('INFOR', f'{self.source}模块使用个进程乘{self.coroutine}个协程')
            if self.recursive_brute and not self.fuzz:  # fuzz mode does not use recursive brute forcing
                logger.log('INFOR', f'开始递归爆破{self.domain}的第1层子域')
            loop = asyncio.get_event_loop()
            asyncio.set_event_loop(loop)
            loop.run_until_complete(self.main(self.domain, rx_queue))

            # 递归爆破下一层的子域
            if self.recursive_brute and not self.fuzz:  # fuzz mode does not use recursive brute forcing
                for layer_num in range(
                        1,
                        self.recursive_depth):  # level-1 brute forcing is already done; the actual recursion depth is layer + 1
                    logger.log('INFOR',
                               f'开始递归爆破{self.domain}的第{layer_num + 1}层子域')
                    for subdomain in self.subdomains.copy():
                        if subdomain.count('.') - self.domain.count(
                                '.') == layer_num:  # condition gating brute forcing of the next level
                            loop.run_until_complete(
                                self.main(subdomain, rx_queue))

            while not rx_queue.empty():  # drain the queue into the database
                database.save_db(db_conn, table_name,
                                 rx_queue.get())  # save the results into the database
            database.copy_table(db_conn, table_name)
            database.deduplicate_subdomain(db_conn, table_name)
            database.remove_invalid(db_conn, table_name)

            end = time.time()
            self.elapsed = round(end - start, 1)
            logger.log('INFOR', f'结束执行{self.source}模块爆破域名{self.domain}')
            logger.log(
                'INFOR',
                f'{self.source}模块耗时{self.elapsed}秒发现{self.domain}的域名{len(self.subdomains)}个'
            )
            logger.log(
                'DEBUG',
                f'{self.source}模块发现{self.domain}的的域名 {self.subdomains}')
Example #24
 def check(self):
     """
     正则匹配响应头中的内容安全策略字段以发现子域名
     :return: None
     """
     resolver = resolve.dns_resolver()
     try:
         answers = resolver.query(self.domain, "NS")
     except Exception as e:
         logger.log('ERROR', e)
         return
     self.nsservers = [str(answer) for answer in answers]
     if not len(self.nsservers):
         logger.log('ALERT', f'没有找到{self.domain}的NS域名服务器记录')
         return
     for nsserver in self.nsservers:
         logger.log('DEBUG', f'正在尝试对{self.domain}的域名服务器{nsserver}进行域传送')
         try:
             xfr = dns.query.xfr(nsserver, self.domain)
             zone = dns.zone.from_xfr(xfr)
         except Exception as e:
             logger.log('DEBUG', str(e))
             logger.log('INFOR', f'对{self.domain}的域名服务器{nsserver}进行域传送失败')
             continue
         else:
             names = zone.nodes.keys()
             for name in names:
                 subdomain = utils.match_subdomain(
                     self.domain,
                     str(name) + '.' + self.domain)
                 self.subdomains = self.subdomains.union(subdomain)
                 record = zone[name].to_text(name)
                 self.results.append(record)
         if self.results:
             logger.log('INFOR', f'发现{self.domain}在{nsserver}上的域传送记录')
             logger.log('DEBUG', '\n'.join(self.results))
             self.results = []
Example #25
def gen_fuzz_domains(domain, rule):
    """
    生成fuzz模式下即将用于爆破的子域集合

    :param str domain: 待爆破的主域
    :param str rule: 用于爆破的正则规则
    :return: 用于爆破的子域集合
    """
    domains = set()
    if '[fuzz]' not in domain:
        logger.log('FATAL', f'没有指定fuzz位置')
        return domains
    if not rule:
        logger.log('FATAL', f'没有指定fuzz规则')
        return domains
    fuzz_count = exrex.count(rule)
    if fuzz_count > 2000000:
        logger.log('FATAL', f'fuzz规则范围太大:{fuzz_count} > 2000000')
        return domains
    logger.log('INFOR', f'fuzz字典大小:{fuzz_count}')
    for i in range(3):
        random_domain = domain.replace('[fuzz]', exrex.getone(rule))
        logger.log('ALERT', f'请注意检查随机生成的{random_domain}是否正确')
    logger.log('ALERT', f'你有6秒检查时间退出使用`CTRL+C`')
    try:
        time.sleep(6)
    except KeyboardInterrupt:
        logger.log('INFOR', '爆破终止')
        exit(0)
    parts = domain.split('[fuzz]')
    for fuzz in exrex.generate(rule):
        fuzz_domain = parts[0] + fuzz + parts[1]
        domains.add(fuzz_domain)
    return domains
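A small hypothetical run of the function above (it logs three random samples and then pauses for the 6-second grace period before generating):

domains = gen_fuzz_domains('www.[fuzz].example.com', r'[a-z]{2}')
# 676 candidates: 'www.aa.example.com', 'www.ab.example.com', ..., 'www.zz.example.com'
print(len(domains))  # 676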
Example #26
 def check_brute_params(self):
     if not (self.word or self.fuzz):
         logger.log('FATAL', f'请至少指定一种爆破模式')
         exit(1)
     if len(self.domains) > 1:
         self.bulk = True
     if self.fuzz:
         if self.place is None or self.rule is None:
             logger.log('FATAL', f'没有指定fuzz位置或规则')
             exit(1)
         if self.bulk:
             logger.log('FATAL', f'批量爆破的场景下不能使用fuzz模式')
             exit(1)
         if self.recursive_brute:
             logger.log('FATAL', f'使用fuzz模式下不能使用递归爆破')
             exit(1)
         fuzz_count = self.place.count('*')
         if fuzz_count < 1:
             logger.log('FATAL', f'没有指定fuzz位置')
             exit(1)
         if fuzz_count > 1:
             logger.log('FATAL', f'只能指定1个fuzz位置')
             exit(1)
         if self.domain not in self.place:
             logger.log('FATAL', f'指定fuzz的域名有误')
             exit(1)
Example #27
def get_wildcard_record(domain, resolver):
    logger.log('INFOR', f'查询{domain}在权威DNS名称服务器的泛解析记录')
    try:
        answer = resolver.query(domain, 'A')
    # If querying the random domain's A record raises Timeout, query again
    except Timeout as e:
        logger.log('ALERT', f'查询超时重新查询中')
        logger.log('DEBUG', e.args)
        raise tenacity.TryAgain
    except (NXDOMAIN, YXDOMAIN, NoAnswer, NoNameservers) as e:
        logger.log('DEBUG', e.args)
        logger.log('INFOR', f'{domain}在权威DNS名称服务器上没有A记录')
        return None, None
    except Exception as e:
        logger.log('ERROR', e.args)
        logger.log('ERROR', f'查询{domain}在权威DNS名称服务器泛解析记录出错')
        exit(1)
    else:
        if answer.rrset is None:
            logger.log('DEBUG', f'查询结果无记录')
            return None, None
        name = answer.name
        ip = {item.address for item in answer}
        ttl = answer.ttl
        logger.log('INFOR', f'{domain} 在权威DNS上解析到域名: {name} '
                   f'IP: {ip} TTL: {ttl}')
        return ip, ttl
Example #28
def check_dir(dir_path):
    if not dir_path.exists():
        logger.log('INFOR', f'不存在{dir_path}目录将会新建')
        dir_path.mkdir(parents=True, exist_ok=True)
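dir_path is expected to be a pathlib.Path (it must support .exists() and .mkdir()); a one-line usage sketch with a placeholder path:

from pathlib import Path

check_dir(Path('results') / 'temp')  # creates results/temp if it does not exist yet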
Example #29
def save_brute_dict(dict_path, dict_set):
    dict_data = '\n'.join(dict_set)
    if not utils.save_data(dict_path, dict_data):
        logger.log('FATAL', '保存生成的字典出错')
        exit(1)
Example #30
    def subdomain_result(self):
        sub_file = pathlib.Path(__file__).parent.resolve().joinpath(
            'results', self.domain + '_subdomain.json')
        if not sub_file.is_file():
            logger.log('ALERT', '子域名文件:%s未发现' % sub_file)
            return None
        with open(sub_file, 'r', encoding='utf-8') as file:
            tmp_str = file.read()
        try:
            tmp_dict = json.loads(tmp_str)
        except:
            logger.log('ALERT', '子域名文件:%s解析json格式错误' % sub_file)
            return None
        sql_fie = str(pathlib.Path(__file__).parent.joinpath('results'))
        try:
            shutil.rmtree(sql_fie)
        except:
            print('[-]删除子域名查找结果文件失败')
        logger.log('INFOR',
                   '[%s]子域名搜集完成,共%s个' % (self.domain, str(len(tmp_dict))))
        IP_list = []
        for tmp in tmp_dict:
            subdomain1 = tmp['subdomain']  # get the subdomain
            if self.__check_subdomain(subdomain1):
                print('[-]已存在%s子域名' % subdomain1)
                continue
            subdomain_ip = self.__domiantoip(subdomain1)  # resolve the subdomain to an IP
            if subdomain_ip:  # resolution succeeded
                if not self.__Deduplication(subdomain_ip, IP_list):  # check whether the IP is a duplicate
                    ip_dict = self.__shodan_ip(subdomain_ip)  # query Shodan
                    time.sleep(1)  # delay between Shodan queries
                    IP_list.append(subdomain_ip)
                    if ip_dict:  # Shodan query succeeded
                        self.__add_srsubdomain(subdomain=subdomain1,
                                               domain=self.domain,
                                               domain_name=self.domain_name,
                                               subdomain_ip=subdomain_ip,
                                               city=ip_dict['city'])
                        print('[+]执行HTTP探测:%s' % subdomain1)
                        port_list = ip_dict['ports']  # try HTTP access on every port
                        tmp_urls = self.__http(subdomain1, port_list)
                        for port in port_list:
                            for key, vaule in port.items():
                                self.__add_port(subdomain=subdomain1,
                                                port=key,
                                                product=vaule['product'],
                                                version=vaule['version'],
                                                data=vaule['data'])
                        for key, value in tmp_urls.items():
                            self.__add_srcurl(subdomain=subdomain1,
                                              url=value['url'],
                                              title=value['title'])

                        continue
            info_dict = self.__check1(subdomain1)
            if info_dict:  # do not record domains with no IP and no website
                self.__add_srsubdomain(subdomain=subdomain1,
                                       domain=self.domain,
                                       domain_name=self.domain_name,
                                       subdomain_ip='',
                                       city='')
                self.__add_srcurl(subdomain=subdomain1,
                                  url=list(info_dict.values())[0]['url'],
                                  title=list(info_dict.values())[0]['title'])
        return True
Example #31
 def gen_result(self, find=0, brute=None, valid=0):
     """
     Generate the final results
     """
     logger.log('DEBUG', f'正在生成最终结果')
     if not len(self.subdomains):  # this module found no subdomains at all
         result = {'id': None,
                   'type': self.type,
                   'alive': None,
                   'request': None,
                   'resolve': None,
                   'new': None,
                   'url': None,
                   'subdomain': None,
                   'level': None,
                   'cname': None,
                   'content': None,
                   'public': None,
                   'port': None,
                   'status': None,
                   'reason': None,
                   'title': None,
                   'banner': None,
                   'header': None,
                   'response': None,
                   'times': None,
                   'ttl': None,
                   'resolver': None,
                   'module': self.module,
                   'source': self.source,
                   'elapse': self.elapse,
                   'find': find,
                   'brute': brute,
                   'valid': valid}
         self.results.append(result)
     else:
         for subdomain in self.subdomains:
             url = 'http://' + subdomain
             level = subdomain.count('.') - self.domain.count('.')
             record = self.records.get(subdomain)
             if record is None:
                 record = dict()
             resolve = record.get('resolve')
             request = record.get('request')
             alive = record.get('alive')
             if self.type != 'A':  # subdomains not found via DNS A-record queries are treated as valid by default
                 resolve = 1
                 request = 1
                 alive = 1
             reason = record.get('reason')
             resolver = record.get('resolver')
             cname = record.get('cname')
             content = record.get('content')
             times = record.get('times')
             ttl = record.get('ttl')
             public = record.get('public')
             if isinstance(cname, list):
                 cname = ','.join(cname)
                 content = ','.join(content)
                 times = ','.join([str(num) for num in times])
                 ttl = ','.join([str(num) for num in ttl])
                 public = ','.join([str(num) for num in public])
             result = {'id': None,
                       'type': self.type,
                       'alive': alive,
                       'request': request,
                       'resolve': resolve,
                       'new': None,
                       'url': url,
                       'subdomain': subdomain,
                       'level': level,
                       'cname': cname,
                       'content': content,
                       'public': public,
                       'port': 80,
                       'status': None,
                       'reason': reason,
                       'title': None,
                       'banner': None,
                       'header': None,
                       'response': None,
                       'times': times,
                       'ttl': ttl,
                       'resolver': resolver,
                       'module': self.module,
                       'source': self.source,
                       'elapse': self.elapse,
                       'find': find,
                       'brute': brute,
                       'valid': valid,
                       }
             self.results.append(result)
Example #32
def do_query_a(domain):
    resolver = resolve.dns_resolver()
    try:
        answer = resolver.query(domain, 'A')
    # If querying the random domain's A record raises Timeout, probe again
    except Timeout as e:
        logger.log('ALERT', f'探测超时重新探测中')
        logger.log('DEBUG', e.args)
        raise TryAgain
    # If the query raises NXDOMAIN, YXDOMAIN, NoAnswer or NoNameservers,
    # the random subdomain has no A record, which means wildcard resolution is not enabled
    except Exception as e:
        logger.log('DEBUG', e.args)
        logger.log('INFOR', f'{domain}没有使用泛解析')
        return False
    if isinstance(answer, Answer):
        ttl = answer.ttl
        name = answer.name
        ips = {item.address for item in answer}
        logger.log('ALERT', f'{domain}使用了泛解析')
        logger.log('ALERT', f'{domain} 解析到域名: {name} ' f'IP: {ips} TTL: {ttl}')
        return True
Example #33
def delete_file(*paths):
    for path in paths:
        try:
            path.unlink()
        except Exception as e:
            logger.log('ERROR', e.args)
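The arguments must be pathlib.Path objects since unlink() is called on each of them; a usage sketch with placeholder paths:

from pathlib import Path

delete_file(Path('temp/dict.txt'), Path('temp/result.json'))  # errors are logged, not raised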
Example #34
    def run(self):
        start = time.time()
        logger.log('INFOR', f'开始执行{self.source}模块')
        self.subdomains = utils.get_domains(self.target)
        self.format = utils.check_format(self.format, len(self.subdomains))
        timestamp = utils.get_timestamp()
        name = f'takeover_check_result_{timestamp}'
        self.path = utils.check_path(self.path, name, self.format)
        if self.subdomains:
            logger.log('INFOR', f'正在检查子域接管风险')
            self.fingerprints = get_fingerprint()
            self.results.headers = ['subdomain', 'cname']
            # Build the queue of subdomains to check
            for domain in self.subdomains:
                self.subdomainq.put(domain)
            # Checker threads
            for _ in range(self.thread):
                check_thread = Thread(target=self.check, daemon=True)
                check_thread.start()
            # Progress thread
            progress_thread = Thread(target=self.progress, daemon=True)
            progress_thread.start()

            self.subdomainq.join()
            self.save()
        else:
            logger.log('FATAL', f'获取域名失败')
        end = time.time()
        elapse = round(end - start, 1)
        logger.log(
            'INFOR', f'{self.source}模块耗时{elapse}秒'
            f'发现{len(self.results)}个子域存在接管风险')
        logger.log('INFOR', f'子域接管风险检查结果 {self.path}')
        logger.log('INFOR', f'结束执行{self.source}模块')
Example #35
 def send(self, urls, data=None, **kwargs):
     """send request to url.If response 200,return response, else return None."""
     allow_redirects = False
     is_logger = urls.get("is_logger", False)
     req_url = urls.get("req_url", "")
     re_try = urls.get("re_try", 0)
     s_time = urls.get("s_time", 0)
     is_cdn = urls.get("is_cdn", False)
     is_test_cdn = urls.get("is_test_cdn", False)
     error_data = {"code": 99999, "message": u"重试次数达到上限"}
     if data:
         method = "post"
         self.setHeaders({"Content-Length": "{0}".format(len(data))})
     else:
         method = "get"
         self.resetHeaders()
     self.setHeadersReferer(urls["Referer"])
     if is_logger:
         logger.log(u"url: {0}\n入参: {1}\n请求方式: {2}\n".format(
             req_url,
             data,
             method,
         ))
     self.setHeadersHost(urls["Host"])
     if is_test_cdn:
         url_host = self._cdn
     elif is_cdn:
         if self._cdn:
             # print(u"当前请求cdn为{}".format(self._cdn))
             url_host = self._cdn
         else:
             url_host = urls["Host"]
     else:
         url_host = urls["Host"]
     http = urls.get("httpType") or "https"
     for i in range(re_try):
         try:
             # sleep(urls["s_time"]) if "s_time" in urls else sleep(0.001)
             sleep(s_time)
             try:
                 requests.packages.urllib3.disable_warnings()
             except:
                 pass
             response = self._s.request(method=method,
                                        timeout=2,
                                        proxies=self._proxies,
                                        url=http + "://" + url_host +
                                        req_url,
                                        data=data,
                                        allow_redirects=allow_redirects,
                                        verify=False,
                                        **kwargs)
             if response.status_code == 200 or response.status_code == 302:
                 if urls.get("not_decode", False):
                     return response.content
                 if response.content:
                     if is_logger:
                         logger.log(u"出参:{0}".format(response.content))
                     if urls["is_json"]:
                         return json.loads(response.content.decode(
                         ) if isinstance(response.content, bytes
                                         ) else response.content)
                     else:
                         return response.content.decode(
                             "utf8", "ignore") if isinstance(
                                 response.content,
                                 bytes) else response.content
                 else:
                     logger.log(u"url: {} 返回参数为空".format(urls["req_url"]))
                     continue
             else:
                 sleep(urls["re_time"])
         except (requests.exceptions.Timeout,
                 requests.exceptions.ReadTimeout,
                 requests.exceptions.ConnectionError):
             pass
         except socket.error:
             pass
     return error_data
Example #36
def save_brute_dict(path, data):
    if not utils.save_data(path, data):
        logger.log('FATAL', '保存生成的字典出错')
        exit(1)
Example #37
def deal_output(output_path):
    logger.log('INFOR', f'正在处理解析结果')
    records = dict()  # all domain resolution data
    with open(output_path) as fd:
        for line in fd:
            line = line.strip()
            try:
                items = json.loads(line)
            except Exception as e:
                logger.log('ERROR', e.args)
                logger.log('ERROR', f'解析行{line}出错跳过解析该行')
                continue
            record = dict()
            record['resolver'] = items.get('resolver')
            qname = items.get('name')[:-1]  # strip the trailing '.'
            status = items.get('status')
            if status != 'NOERROR':
                record['alive'] = 0
                record['resolve'] = 0
                record['reason'] = status
                records[qname] = record
                continue
            data = items.get('data')
            if 'answers' not in data:
                record['alive'] = 0
                record['resolve'] = 0
                record['reason'] = 'NOANSWER'
                records[qname] = record
                continue
            flag = False
            cname = list()
            ips = list()
            public = list()
            ttls = list()
            answers = data.get('answers')
            for answer in answers:
                if answer.get('type') == 'A':
                    flag = True
                    cname.append(answer.get('name')[:-1])  # strip the trailing '.'
                    ip = answer.get('data')
                    ips.append(ip)
                    ttl = answer.get('ttl')
                    ttls.append(str(ttl))
                    is_public = utils.ip_is_public(ip)
                    public.append(str(is_public))
                    record['resolve'] = 1
                    record['reason'] = status
                    record['cname'] = ','.join(cname)
                    record['content'] = ','.join(ips)
                    record['public'] = ','.join(public)
                    record['ttl'] = ','.join(ttls)
                    records[qname] = record
            if not flag:
                record['alive'] = 0
                record['resolve'] = 0
                record['reason'] = 'NOARECORD'
                records[qname] = record
    if not records:
        logger.log('FATAL', f'无有效解析结果')
        exit(1)
    return records
Example #38
def export(table, db=None, valid=None, path=None, format='xlsx', output=False):
    """
    OneForAll数据库导出模块

    Example:
        python dbexport.py --table name --format csv --path= ./result.csv
        python dbexport.py --db result.db --table name --output False

    Note:
        参数valid可选值1,0,None,分别表示导出有效,无效,全部子域
        参数format可选格式:'csv', 'tsv', 'json', 'yaml', 'html', 'xls', 'xlsx',
                         'dbf', 'latex', 'ods'
        参数path为None会根据format参数和域名名称在项目结果目录生成相应文件

    :param str table:   要导出的表
    :param str db:      要导出的数据库路径(默认为results/result.sqlite3)
    :param int valid:   导出子域的有效性(默认None)
    :param str format:  导出格式(默认xlsx)
    :param str path:    导出路径(默认None)
    :param bool output: 是否将导出数据输出到终端(默认False)
    """
    database = Database(db)
    if valid is None:
        rows = database.get_data(table)
    elif isinstance(valid, int):
        rows = database.get_subdomain(table, valid)
    else:
        rows = database.get_data(table)  # unexpected value, export all subdomains
    if output:
        print(rows.dataset)
    if not path:
        path = 'export.' + format
    logger.log('INFOR', f'正在将数据库中{table}表导出')
    try:
        with open(path, 'w') as file:
            file.write(rows.export(format))
            logger.log('INFOR', '成功完成导出')
            logger.log('INFOR', path)
    except TypeError:
        with open(path, 'wb') as file:
            file.write(rows.export(format))
            logger.log('INFOR', '成功完成导出')
            logger.log('INFOR', path)
    except Exception as e:
        logger.log('ERROR', e)