def check_alerts(self, target):
        if len(self.alerts) > 0:
        cprint_red('{} identified as a potentially malicious IP'.format(target))
            # cprint_red(str(self.alerts))
        else:
            cprint_cyan('{} unknown or not a malicious IP'.format(target))

        # Format the IP threat events as a dictionary
        entity_dic = {}
        # alerts_dic = {}
        alerts_lst = []

        idx = 1
        for item in self.alerts:
            key = 'pulse' + str(idx)
            value = item.split(':')[1].strip()
            # alerts_dic[key] = value
            alerts_lst.append(value)
            idx += 1
        #
        entity_dic['ip'] = target
        entity_dic['alerts'] = alerts_lst
        entity_dic['alerts_total'] = len(alerts_lst)
        entity_dic['sequence'] = self.entity_num

        self.results.append(entity_dic)
        json_str = json.dumps(entity_dic, sort_keys=False, indent=4)
        print(json_str)
        print()
def search_from_feeds(target_list):
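    # Load every cached feed file under REPUTATION_DB_PATH and check each target
    # against the collected threat values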
    feed_lists = os.listdir(REPUTATION_DB_PATH)

    feed_threat_values = []
    cprint_cyan("Searching malicious ip from thread feeds ...")

    # Iterate over all feed files and collect their threat values into a list
    for feed_file in feed_lists:
        feed_path = os.path.join(REPUTATION_DB_PATH, feed_file)
        if os.path.isdir(feed_path):
            continue
        try:
            with open(feed_path) as f:
                feed_content = f.readlines()
        except Exception:
            cprint_red("Open feed data failed")
            continue

        for line in feed_content:
            feed_entity = json.loads(line)
            try:
                feed_threat_values.append(feed_entity['value'])
            except KeyError:
                pass
            except TypeError:
                # cprint_red(e)
                continue

    feed_threat_total = len(feed_threat_values)
    cprint_cyan('Total {} entries across all feeds'.format(feed_threat_total))

    for item in target_list:
        item = item.strip()
        check_match(item, feed_threat_values)
    def fetch_feed_content(self, feed_id):
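        # Query the MISP server's feeds preview endpoint for the given feed id
        # and return the parsed feed entities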
        self.feed_id = int(feed_id)
        self.retrieve_feed_name_by_id(self.feed_id)

        req_header = dict()
        req_header['ACCEPT'] = 'application/json'
        req_header['Content-Type'] = 'application/json'
        req_header['Authorization'] = MISP_API_KEY

        cprint_cyan("Fetching threat from feed{}-{}".format(
            self.feed_id, self.feed_name))

        feeds_preview_url = MISP_SERVER + '/feeds/previewIndex/{}/page:'.format(
            self.feed_id)
        try:
            rep = requests.get(feeds_preview_url,
                               headers=req_header,
                               verify=False)
            self.feed_entities = json.loads(rep.content)
            feed_entity_num = len(self.feed_entities)
            cprint_cyan("feed{}-{}中共获取到{}条数据".format(self.feed_id,
                                                     self.feed_name,
                                                     feed_entity_num))
        except Exception as e:
            cprint_red("Feed request failed")
            # cprint_red(e)
            self.feed_entities = []  # keep the return value below well-defined

        return self.feed_entities
def save_file(content, outfile='malicious_items.txt'):
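    # Write each detected item to the output file as one JSON object per line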
    try:
        with open(outfile, 'w') as f:
            for line in content:
                f.writelines(json.dumps(line))
                f.writelines("\n")
    except Exception as e:
        cprint_red(e)
def print_banner():
    logo = r""" 
         _____ _                    _   _   _                       _
        |_   _| |__  _ __ ___  __ _| |_| | | | ___  _   _ _ __   __| |
          | | | '_ \| '__/ _ \/ _` | __| |_| |/ _ \| | | | '_ \ / _` |
          | | | | | | | |  __/ (_| | |_|  _  | (_) | |_| | | | | (_| |
          |_| |_| |_|_|  \___|\__,_|\__|_| |_|\___/ \__,_|_| |_|\__,_|
        """
    cprint_red(logo)
    cprint()
 def download_vod(self, filename, url):
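     # Stream the VOD to <filename>.mp4 in 1 KiB chunks, then hand it off to rclone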
     cprint_green("Downloading vod for {}".format(url))
     chunk_size = 1024
     try:
         r = requests.get(url, stream=True)
     except RequestException as e:
         cprint_red("Could not open URL: {}".format(url))
         return False
     with open(filename + ".mp4", "wb") as f:
         for chunk in r.iter_content(chunk_size=chunk_size):
             if not chunk:
                 continue
             f.write(chunk)
             f.flush()
     cprint_green("File downloaded succesfully, at filename: {}.mp4".format(
         filename))
     subprocess.run(['rclone', 'move', filename + '.mp4',
                     'milo:milo/b/Twitch/{}'.format(self.name)])
     return True
def parse_input_file(inputfile):
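    # Read the target list, deduplicate it, split it into chunks and parse them in parallel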
    item_list = []

    try:
        with open(inputfile, "r") as fd:
            content_lines = fd.readlines()
    except:
        cprint_red("文件{}打开失败".format(inputfile))
        sys.exit(1)

    # Read the IP list file and deduplicate the entries via a set
    content_lines = list(set(content_lines))
    content_length = len(content_lines)
    cprint_cyan("[*] Input file contains [{}] items".format(content_length))

    # Split the records into chunks for parallel processing
    chunk_size = MAX_LINES
    # The number of IP records is below the maximum that can be processed at once
    if content_length < MAX_LINES:
        # With fewer records than CPU cores, use a chunk size of 1;
        # otherwise distribute the records evenly across the cores
        if content_length / CPU_CORES < 1:
            chunk_size = 1
        else:
            chunk_size = content_length // CPU_CORES
    tmp = []
    for index in range(0, content_length, chunk_size):
        # Split the records into fixed-size chunks; the last chunk keeps whatever remains
        if index + chunk_size > content_length:
            tmp.append(content_lines[index:])
        else:
            tmp.append(content_lines[index:index + chunk_size])
    content_lines = tmp

    pool = Pool(CPU_CORES)
    rest = pool.map(query_type_parse, content_lines)
    for i in rest:
        item_list.extend(i)

    return item_list
def print_flaged_item():
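    # Print every item flagged as malicious, plus any targets that failed to process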
    item_total = len(detect_results)

    cprint_red('###### Malicious objects ######')
    for item in detect_results:
        cprint_red('{}'.format(item))
    print()
    cprint_red("Total: {}".format(item_total))

    if len(failed_ips) > 0:
        print("###### FAILED TO PROCESS ######")
        for ip in failed_ips:
            cprint_yellow('%s' % ip)
 def make_request(self, url):
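     # GET the URL with the Client-id header and return the parsed JSON response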
     try:
         r = requests.get(url, headers={"Client-id": CLIENT_ID})
     except RequestException:
         cprint_red("Bad internet detected, exiting program")
         sys.exit(1)
     return json_parse(r.text)
def main():
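    # Parse the arguments and dispatch to cache, local (offline) or online (OTX) query mode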
    try:
        print_banner()

        args = arg_parse()
        if args['list']:
            for f in threat_feeds_db:
                cprint_cyan("{}. {} - {}\n".format(f['feed_id'],
                                                   f['feed_name'],
                                                   f['feed_url']))
            sys.exit(1)
        if args['feed_id']:
            cache_mode = True

        if args['ip']:
            target_ip = args['ip']
        elif args['domain']:
            target_domain = args['domain']
        elif args['ipfile']:
            inputFile = args['ipfile']

        if args['output']:
            save_file_flag = True
            output_file = args['output']

        if args['local']:
            local_mode = True
            online_mode = False
            cache_mode = False
        elif args['online']:
            online_mode = True
            local_mode = False
            cache_mode = False
        else:
            cache_mode = True
            local_mode = False
            online_mode = False

        if args['db']:
            REPUTATION_DB_PATH = args['db']
            if not os.path.isdir(REPUTATION_DB_PATH):
                os.mkdir(REPUTATION_DB_PATH)

        # Cache mode: download the feed intelligence data first
        if cache_mode:
            #
            if 'all' in args['feed_id']:
                feed_ids = [f['feed_id'] for f in threat_feeds_db]

                for f_id in feed_ids:
                    download_handler = threading.Thread(name='download_feeds',
                                                        target=download_feeds,
                                                        args=[f_id])
                    download_handler.daemon = True
                    download_handler.start()
                    download_handler.join()
            else:
                if len(args['feed_id']) == 1:
                    start_id = int(args['feed_id'][0])
                    stop_id = start_id + 1
                elif len(args['feed_id']) == 2:
                    start_id = int(args['feed_id'][0])
                    stop_id = int(args['feed_id'][1]) + 1
                else:
                    cprint_red('Invalid feed id!')
                    sys.exit(1)

                for f_id in range(start_id, stop_id, 1):
                    download_handler = threading.Thread(name='download',
                                                        target=download_feeds,
                                                        args=[f_id])
                    download_handler.daemon = True
                    download_handler.start()
                    download_handler.join()

            # If only caching was requested and no ip, domain or ip file was given, exit after caching
            if not ('target_ip' in locals() or 'target_domain' in locals()
                    or 'inputFile' in locals()):
                sys.exit(0)

        # Offline query mode: search the local intelligence feeds directly
        if local_mode:
            try:
                if 'target_ip' in locals():
                    target_list = [target_ip]
                elif 'target_domain' in locals():
                    target_list = [target_domain]
                else:
                    target_list = parse_input_file(inputFile)

                # Check the targets against the low-reputation IP feeds
                search_from_feeds(target_list)
                # detecting_threat_from_feeds(target_list)
                print_flaged_item()
                if 'save_file_flag' in locals():
                    save_file(detect_results, output_file)
            except:
                cprint_red("Error")
                traceback.print_exc()

        # Online query via AlienVault OTX
        if online_mode:
            query_type = args['query_type']

            otx = otx_reputation()
            if 'target_ip' in locals():
                otx.detect_ip(target_ip)
                otx.check_alerts(target_ip)
                content = otx.results.pop()
                otx.entity_num += 1

                if 'save_file_flag' in locals():
                    otx.save_result(content, outfile=output_file)

            if 'target_domain' in locals():
                otx.detect_domain(target_domain)
                otx.check_alerts(target_domain)
                content = otx.results.pop()
                otx.entity_num += 1

                if 'save_file_flag' in locals():
                    otx.save_result(content, outfile=output_file)

            if 'inputFile' in locals():
                target_list = parse_input_file(inputFile)
                if query_type is None:
                    cprint_red(
                        "Please specify --type options when using online query mode!"
                    )
                    sys.exit(1)
                if query_type.upper() == 'IP':
                    for ip in target_list:
                        ip = ip.strip()
                        otx.detect_ip(ip)
                        otx.check_alerts(ip)
                        content = otx.results.pop()
                        if 'save_file_flag' in locals():
                            otx.save_result(content, outfile=output_file)
                        # Reset the alert list once this IP has been processed
                        otx.alerts = []
                        otx.entity_num += 1
                elif query_type.upper() == 'DOMAIN':
                    for domain in target_list:
                        domain = domain.strip()
                        otx.detect_domain(domain)
                        otx.check_alerts(domain)
                        content = otx.results.pop()
                        if 'save_file_flag' in locals():
                            otx.save_result(content, outfile=output_file)
                        # Reset the alert list once this domain has been processed
                        otx.alerts = []
                        otx.entity_num += 1
    except Exception:
        pass
        # traceback.print_exc()
def check_match(item, db):
    # Record the item in the results if it appears in the feed data
    if item in db:
        cprint_red("Malicious item detected: {}".format(item))
        if item != "":
            detect_results.append(item)


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt as e:
        cprint_red("Exit ...")