Example #1
0
    def run(self, conf, args, plugins):
        """Dispatch a VirusTotal CLI subcommand.

        Selects the private or public API client from
        conf["VirusTotal"]["type"], then routes on args.subcommand.
        Prints the parser help when no subcommand (or an unknown one)
        is given.

        :param conf: configuration mapping; only conf["VirusTotal"]
            ("type" and "key") is read here.
        :param args: parsed argparse namespace; per-subcommand attributes
            (HASH, FILE, DOMAIN, IP, URL, raw, json, extended) are used.
        :param plugins: not used by this method.
        """
        if 'subcommand' in args:
            # Any configured type other than "public" selects the private
            # API, which exposes extra endpoints (sample download,
            # network-traffic / behaviour reports, ip/domain lists).
            if conf["VirusTotal"]["type"] != "public":
                vt = PrivateApi(conf["VirusTotal"]["key"])
                if args.subcommand == "hash":
                    response = vt.get_file_report(args.HASH)
                    if args.raw:
                        print(json.dumps(response, sort_keys=False, indent=4))
                        # --extended additionally dumps the sandbox
                        # network-traffic and behaviour reports.
                        if args.extended:
                            response = vt.get_network_traffic(args.HASH)
                            print(
                                json.dumps(response, sort_keys=False,
                                           indent=4))
                            response = vt.get_file_behaviour(args.HASH)
                            print(
                                json.dumps(response, sort_keys=False,
                                           indent=4))
                    else:
                        self.print_file(response)
                elif args.subcommand == "dl":
                    # Download the sample to a file named after the hash;
                    # refuse to clobber an existing local file.
                    if os.path.isfile(args.HASH):
                        print("File %s already exists" % args.HASH)
                        sys.exit(0)
                    data = vt.get_file(args.HASH)
                    # The client may return either a dict wrapper holding
                    # the payload under 'results', or the raw bytes.
                    if isinstance(data, dict):
                        if 'results' in data:
                            with open(args.HASH, "wb") as f:
                                f.write(data['results'])
                            print("File downloaded as %s" % args.HASH)
                        else:
                            print('Invalid answer format')
                            sys.exit(1)
                    else:
                        with open(args.HASH, "wb") as f:
                            f.write(data)
                        print("File downloaded as %s" % args.HASH)

                elif args.subcommand == "file":
                    # Hash the local file with SHA-256, then query the
                    # report for that digest.
                    with open(args.FILE, "rb") as f:
                        # FIXME : could be more efficient
                        data = f.read()
                    m = hashlib.sha256()
                    m.update(data)
                    h = m.hexdigest()
                    response = vt.get_file_report(h)
                    if args.raw:
                        print(json.dumps(response, sort_keys=False, indent=4))
                    else:
                        self.print_file(response)
                elif args.subcommand == "hashlist":
                    # One report per unique hash, emitted as
                    # semicolon-separated CSV on stdout.
                    with open(args.FILE, 'r') as infile:
                        data = infile.read().split()
                    hash_list = list(set([a.strip() for a in data]))
                    print(
                        "Hash;Found;Detection;Total AV;First Seen;Last Seen;Link"
                    )
                    for h in hash_list:
                        response = vt.get_file_report(h)
                        # Top-level response_code is the request status
                        # (200 == OK); abort the whole run on any error.
                        if response["response_code"] != 200:
                            print("Error with the request (reponse code %i)" %
                                  response["response_code"])
                            print(
                                json.dumps(response, sort_keys=False,
                                           indent=4))
                            print("Quitting...")
                            sys.exit(1)
                        # results["response_code"] == 0 means VirusTotal
                        # has no record of this hash.
                        if "response_code" in response["results"]:
                            if response["results"]["response_code"] == 0:
                                print("%s;Not found;;;;;" % h)
                            else:
                                print("%s;Found;%i;%i;%s;%s;%s" %
                                      (h, response["results"]["positives"],
                                       response["results"]["total"],
                                       response["results"]["first_seen"],
                                       response["results"]["last_seen"],
                                       response["results"]["permalink"]))
                        else:
                            print("%s;Not found;;;;;" % h)
                elif args.subcommand == "domainlist":
                    with open(args.FILE, 'r') as infile:
                        data = infile.read().split()
                    for d in data:
                        print("################ Domain %s" % d.strip())
                        res = vt.get_domain_report(d.strip())
                        self.print_domaininfo(res)
                elif args.subcommand == "iplist":
                    with open(args.FILE, 'r') as infile:
                        data = infile.read().split()
                    for d in data:
                        print("################ IP %s" % d.strip())
                        # unbracket() presumably strips defanging
                        # brackets (e.g. 1.2.3[.]4) -- TODO confirm
                        # against its definition elsewhere.
                        res = vt.get_ip_report(unbracket(d.strip()))
                        print(json.dumps(res, sort_keys=False, indent=4))
                elif args.subcommand == "domain":
                    res = vt.get_domain_report(unbracket(args.DOMAIN))
                    if args.json:
                        print(json.dumps(res, sort_keys=False, indent=4))
                    else:
                        self.print_domaininfo(res)
                elif args.subcommand == "ip":
                    res = vt.get_ip_report(unbracket(args.IP))
                    print(json.dumps(res, sort_keys=False, indent=4))
                elif args.subcommand == "url":
                    res = vt.get_url_report(args.URL)
                    print(json.dumps(res, sort_keys=False, indent=4))
                else:
                    self.parser.print_help()
            else:
                # Public (free) API: same commands minus the
                # private-only features (no dl, no extended reports,
                # no first/last-seen dates in hashlist output).
                vt = PublicApi(conf["VirusTotal"]["key"])
                if args.subcommand == "hash":
                    response = vt.get_file_report(args.HASH)
                    if args.raw:
                        print(json.dumps(response, sort_keys=False, indent=4))
                    else:
                        self.print_file(response)
                elif args.subcommand == "file":
                    with open(args.FILE, "rb") as f:
                        # FIXME : could be more efficient
                        data = f.read()
                    m = hashlib.sha256()
                    m.update(data)
                    response = vt.get_file_report(m.hexdigest())
                    if args.raw:
                        print(json.dumps(response, sort_keys=False, indent=4))
                    else:
                        self.print_file(response)
                elif args.subcommand == "hashlist":
                    with open(args.FILE, 'r') as infile:
                        data = infile.read().split()
                    hash_list = list(set([a.strip() for a in data]))
                    print("Hash;Found;Detection;Total AV;Link")
                    for h in hash_list:
                        response = vt.get_file_report(h)
                        if response["response_code"] != 200:
                            print("Error with the request (reponse code %i)" %
                                  response["response_code"])
                            print(
                                json.dumps(response, sort_keys=False,
                                           indent=4))
                            print("Quitting...")
                            sys.exit(1)
                        if "response_code" in response["results"]:
                            if response["results"]["response_code"] == 0:
                                print("%s;Not found;;;" % h)
                            else:
                                print("%s;Found;%i;%i;%s" %
                                      (h, response["results"]["positives"],
                                       response["results"]["total"],
                                       response["results"]["permalink"]))
                        else:
                            print("%s;Not found;;;" % h)
                elif args.subcommand == "domain":
                    res = vt.get_domain_report(unbracket(args.DOMAIN))
                    if args.json:
                        print(json.dumps(res, sort_keys=False, indent=4))
                    else:
                        self.print_domaininfo(res)
                elif args.subcommand == "ip":
                    res = vt.get_ip_report(unbracket(args.IP))
                    print(json.dumps(res, sort_keys=False, indent=4))
                elif args.subcommand == "url":
                    res = vt.get_url_report(args.URL)
                    print(json.dumps(res, sort_keys=False, indent=4))
                elif args.subcommand == "domainlist":
                    print(
                        "Not implemented yet with public access, please propose PR if you need it"
                    )
                elif args.subcommand == "dl":
                    print(
                        "VirusTotal does not allow downloading files with a public feed, sorry"
                    )
                    sys.exit(0)
                else:
                    self.parser.print_help()
        else:
            self.parser.print_help()
Example #2
0
class RuleEngine(object):
    """Generate Snort rules for HTTP GET requests whose URL VirusTotal flags.

    Walks TCP payloads out of pcap files, extracts Host/URI pairs from GET
    requests, looks each URL up on VirusTotal (rate-limited, with a local
    persisted cache) and emits a SnortRule for every URL with at least one
    positive detection.
    """

    def __init__(self):
        # Rate-limit bookkeeping: at most 4 VT requests per minute.
        self.vt_req_counter = 0
        self.vt_req_timer = time.time()
        self.vd = Validator()
        self.vt = VirusTotal(apikey.APIKEY_1)
        # sha1(url) -> [url, positives]; loaded/saved via pickle_tool.
        self.cache = {}

    def iterpcap(self, path):
        """Yield the path of every .pcap file found under *path* (recursive)."""
        for dirPath, dirNames, fileNames in os.walk(path):
            for f in fileNames:
                # BUGFIX: the old check (f.split('.')[1] == 'pcap') raised
                # IndexError for names without a dot and missed names with
                # several dots (e.g. "a.b.pcap"); test the extension instead.
                if f.endswith('.pcap'):
                    yield os.path.join(dirPath, f)

    def _iterpayload(self, path):
        """Yield (payload, five_tuple) for every non-empty TCP segment."""
        connection = utils.follow_tcp_stream(path)
        for conn, frame in connection.iteritems():
            for seq, content in frame.iteritems():
                # Packets without payload are skipped silently.
                if content:
                    yield content, conn

    def _check_timer_counter(self):
        """After every 4th VT request, sleep out the rest of the minute."""
        if self.vt_req_counter == 4:
            self.vt_req_counter = 0
            period = time.time() - self.vt_req_timer
            waiting = 60 - period + 1
            if waiting > 0:
                logger.info("Waiting %s seconds", str(waiting))
                time.sleep(waiting)
            self.vt_req_timer = time.time()

    def _make_rule(self, content, uricontent, dst_port, sid=0):
        """Build a SnortRule matching *content* (Host) and *uricontent* (URI).

        :param sid: accepted for interface stability but not yet emitted.
        """
        rule = SnortRule()
        pattern = dict()
        pattern['msg'] = '"Trojan.Gen.uricontent"'
        pattern['content'] = ['"{host}"'.format(host=content), 'nocase']
        pattern['uricontent'] = ['"{uri}"'.format(uri=uricontent), 'nocase']
        # pattern['sid'] = sid
        pattern['dst_port'] = dst_port
        rule.set_malicious_pattern(**pattern)
        return rule

    def _get_url_positive(self, resource):
        """Return the VirusTotal positives count for *resource*.

        Serves the answer from the local cache when possible; otherwise
        queries the URL report. When VT has no report yet, submits the URL
        for scanning and returns None. Exits the process on an API error.
        """
        urlkey = hashlib.sha1(resource).hexdigest()
        if urlkey in self.cache:
            return self.cache[urlkey][1]

        self.vt_req_counter += 1
        logger.info("Search on VirusTotal counter: %s",
                    str(self.vt_req_counter))

        response = self.vt.get_url_report(resource)

        if response.get('error') is not None:
            logger.info("Error: {e}".format(e=response.get('error')))
            sys.exit(0)

        results = response.get('results')
        positives = results.get('positives')
        self._check_timer_counter()

        # BUGFIX: test None before the numeric comparison -- on Python 3
        # `None >= 0` raises TypeError (on Python 2 it evaluated to False,
        # so this reordering preserves the original behaviour).
        if positives is None:
            logger.info('''No report.
                        Submit the URL to VirusTotal counter: %s''',
                        str(self.vt_req_counter))
            self.vt.scan_url(resource)
            self._check_timer_counter()
            return None
        if positives >= 0:
            self.cache[urlkey] = [resource, positives]
            return positives
        logger.debug("Get reports failed.")
        return None

    def gen_rule(self, pcap_path):
        """Yield a SnortRule for each flagged GET URL found in *pcap_path*.

        Loads the cache from disk first, appends every generated rule to
        'uricontent.rules', and persists the cache when the pcap is done.
        """
        self.cache = pickle_tool.check_json()
        for content, conn in self._iterpayload(pcap_path):
            get_method = self.vd.is_get_method(content)
            host = self.vd.is_hsot(content)  # sic: Validator's method name
            if not (host and get_method):
                continue

            if get_method.group(1) == '/':
                url = self.vd.is_valid_url(host.group(1).rstrip())
            else:
                url = self.vd.is_valid_url(host.group(1).rstrip() +
                                           get_method.group(1))

            if url is not None:
                # Reject URLs that are not valid UTF-8, logging them aside.
                try:
                    url.group(0).decode('utf-8')
                except UnicodeDecodeError:
                    with open('invalid_utf8', 'a') as fp:
                        fp.write('{u}\n'.format(u=url.group(0)))
                    url = None

            if url is None:
                continue

            host_content = host.group(0).rstrip()
            uricontent = get_method.group(1)
            pos = self._get_url_positive(url.group(0))

            # None (no report yet) and 0 (clean) both produce no rule.
            if pos is not None and pos > 0:
                if uricontent == '/':
                    uricontent = None
                rule = self._make_rule(host_content, uricontent,
                                       conn[3], 0)
                with open('uricontent.rules', 'a') as fp:
                    fp.write('{r}\n'.format(r=str(rule)))
                yield rule

        pickle_tool.update_json(self.cache)
Example #3
0
class RuleEngine(object):
    """Build Snort rules from pcap traffic using VirusTotal URL reports.

    Extracts Host/URI pairs from HTTP GET payloads found in pcap files,
    checks each reconstructed URL against VirusTotal (with caching and
    public-API rate limiting), and yields a SnortRule for every URL that
    has at least one positive detection.
    """

    def __init__(self):
        # At most 4 VirusTotal requests per rolling minute.
        self.vt_req_counter = 0
        self.vt_req_timer = time.time()
        self.vd = Validator()
        self.vt = VirusTotal(apikey.APIKEY_1)
        # Maps sha1(url) -> [url, positives]; persisted by pickle_tool.
        self.cache = {}

    def iterpcap(self, path):
        """Recursively yield every *.pcap file path below *path*."""
        for dirPath, dirNames, fileNames in os.walk(path):
            for f in fileNames:
                # BUGFIX: f.split('.')[1] == 'pcap' crashed on dot-less
                # names (IndexError) and misclassified multi-dot names;
                # an extension check is the intended test.
                if f.endswith('.pcap'):
                    yield os.path.join(dirPath, f)

    def _iterpayload(self, path):
        """Yield (payload, five_tuple) for each TCP segment with data."""
        connection = utils.follow_tcp_stream(path)
        for conn, frame in connection.iteritems():
            for seq, content in frame.iteritems():
                # Empty payloads (pure ACKs etc.) are skipped.
                if content:
                    yield content, conn

    def _check_timer_counter(self):
        """Enforce the 4-requests-per-minute VirusTotal rate limit."""
        if self.vt_req_counter == 4:
            self.vt_req_counter = 0
            period = time.time() - self.vt_req_timer
            waiting = 60 - period + 1
            if waiting > 0:
                logger.info("Waiting %s seconds", str(waiting))
                time.sleep(waiting)
            self.vt_req_timer = time.time()

    def _make_rule(self, content, uricontent, dst_port, sid=0):
        """Return a SnortRule matching the given Host and URI patterns.

        :param sid: kept in the signature for compatibility; not emitted.
        """
        rule = SnortRule()
        pattern = dict()
        pattern['msg'] = '"Trojan.Gen.uricontent"'
        pattern['content'] = ['"{host}"'.format(host=content), 'nocase']
        pattern['uricontent'] = ['"{uri}"'.format(uri=uricontent), 'nocase']
        # pattern['sid'] = sid
        pattern['dst_port'] = dst_port
        rule.set_malicious_pattern(**pattern)
        return rule

    def _get_url_positive(self, resource):
        """Return VirusTotal's positives count for *resource*, or None.

        Cached answers are returned without a network request. When no
        report exists yet, the URL is submitted for scanning and None is
        returned; API errors terminate the process.
        """
        urlkey = hashlib.sha1(resource).hexdigest()
        if urlkey in self.cache:
            return self.cache[urlkey][1]

        self.vt_req_counter += 1
        logger.info("Search on VirusTotal counter: %s",
                    str(self.vt_req_counter))

        response = self.vt.get_url_report(resource)

        if response.get('error') is not None:
            logger.info("Error: {e}".format(e=response.get('error')))
            sys.exit(0)

        results = response.get('results')
        positives = results.get('positives')
        self._check_timer_counter()

        # BUGFIX: the None branch must come first -- `None >= 0` raises
        # TypeError on Python 3 (it was False on Python 2, so this
        # ordering change is behaviour-preserving there).
        if positives is None:
            logger.info(
                '''No report.
                        Submit the URL to VirusTotal counter: %s''',
                str(self.vt_req_counter))
            self.vt.scan_url(resource)
            self._check_timer_counter()
            return None
        if positives >= 0:
            self.cache[urlkey] = [resource, positives]
            return positives
        logger.debug("Get reports failed.")
        return None

    def gen_rule(self, pcap_path):
        """Yield a SnortRule per flagged GET URL in *pcap_path*.

        Side effects: loads/persists the URL cache, appends rules to
        'uricontent.rules', and logs non-UTF-8 URLs to 'invalid_utf8'.
        """
        self.cache = pickle_tool.check_json()
        for content, conn in self._iterpayload(pcap_path):
            get_method = self.vd.is_get_method(content)
            host = self.vd.is_hsot(content)  # sic: Validator method name
            if not (host and get_method):
                continue

            if get_method.group(1) == '/':
                url = self.vd.is_valid_url(host.group(1).rstrip())
            else:
                url = self.vd.is_valid_url(
                    host.group(1).rstrip() + get_method.group(1))

            if url is not None:
                # Drop URLs that are not valid UTF-8; keep a record aside.
                try:
                    url.group(0).decode('utf-8')
                except UnicodeDecodeError:
                    with open('invalid_utf8', 'a') as fp:
                        fp.write('{u}\n'.format(u=url.group(0)))
                    url = None

            if url is None:
                continue

            host_content = host.group(0).rstrip()
            uricontent = get_method.group(1)
            pos = self._get_url_positive(url.group(0))

            # No rule for unknown (None) or clean (0) URLs.
            if pos is not None and pos > 0:
                if uricontent == '/':
                    uricontent = None
                rule = self._make_rule(host_content, uricontent,
                                       conn[3], 0)
                with open('uricontent.rules', 'a') as fp:
                    fp.write('{r}\n'.format(r=str(rule)))
                yield rule

        pickle_tool.update_json(self.cache)