Beispiel #1
0
 def analyze_file(self, parsed):
     '''
     Run the analyzer on the target file and hand results to the
     report handler; log an error if the target is not a regular file.
     '''
     target = parsed.file
     if not (path.exists(target) and path.isfile(target)):
         log_string("Target File/dump is wrong..", "Red")
         return
     data = self.analyzer.analyze(parsed)
     self.reporthandler.check_output(data, parsed)
     del data
Beispiel #2
0
 def check_output(self, data, parsed):
     '''
     Render and dump the analysis output (HTML and/or JSON) according to
     the parsed switches, then pass everything on to save_output.

     data: analysis results dict (replaced by its serialized form when
           any JSON output is requested)
     parsed: argparse-style namespace of output switches
     '''
     # sentinel handed to save_output when no HTML report was rendered
     renderedhtml = "Error"
     if parsed.db_dump_html or parsed.disk_dump_html:
         renderedhtml = self.htmlmaker.render_template(
             data, None, None, parsed, True)
         log_string(
             "Generated Html file {}".format(data["Location"]["html"]),
             "Yellow")
     if parsed.db_dump_json or parsed.disk_dump_json or parsed.print_json:
         data = serialize_obj(
             data
         )  # force serialization first: some values are objects that break len()/json handling
         self.jsonmaker.clean_data(data)
     if parsed.disk_dump_json:
         if self.jsonmaker.dump_json(data):
             log_string(
                 "Generated JSON file {}".format(data["Location"]["json"]),
                 "Yellow")
             if parsed.open:
                 open_in_browser(data["Location"]["json"])
     if parsed.print_json:
         self.jsonmaker.print_json(data)
     self.save_output(data, renderedhtml, parsed)
Beispiel #3
0
 def do_cache_switches(self):
     '''
     Capture the analyze parser's help text and store it in the cache.

     Logs success or failure; never raises.
     '''
     try:
         # capture print_help() output instead of letting it hit stdout
         with StringIO() as buf, redirect_stdout(buf):
             self._analyze_parser.print_help()
             output = buf.getvalue()
         put_cache("switches", output)
         log_string("Dumped switches", "Green")
     except Exception:
         # was a bare `except:` which also swallowed SystemExit and
         # KeyboardInterrupt; narrowed to Exception
         log_string("Dumping switches failed", "Red")
Beispiel #4
0
 def do_cache_switches(self):
     '''
     Cache the analyze parser's help output, reporting success or
     failure afterwards (errors are suppressed by ignore_excpetion).
     '''
     dumped = False
     with ignore_excpetion(Exception):
         with StringIO() as captured, redirect_stdout(captured):
             self._analyze_parser.print_help()
             help_text = captured.getvalue()
         put_cache("switches", help_text)
         dumped = True
     if dumped:
         log_string("Dumped switches", "Green")
     else:
         log_string("Dumping switches failed", "Red")
Beispiel #5
0
    def do_analyze(self, line, silent=False):
        '''
        Parse an analysis task, apply its timeout settings and run the
        analysis on the target file.

        line: task dict; merged over the argparse defaults
        silent: passed True by the queue worker  # NOTE(review): not read here - confirm callers rely on it
        '''
        good_exec = False
        # ignore_excpetion suppresses any error inside; good_exec stays
        # False on failure and the method bails out below
        with ignore_excpetion(Exception):
            line["output"] = json_settings[
                environ["analyzer_env"]]["malware_output_folder"]
            # parser defaults, overridden by whatever the task provided
            parsed_args = vars(self._analyze_parser.parse_args(""))
            parsed = Namespace({
                **parsed_args,
                **line
            }, ["open", "print"], [
                "db_dump_json", "db_dump_html", "disk_dump_html",
                "disk_dump_json"
            ])
            if not parsed.uuid:
                return
            # custom timeouts are only accepted inside (0, 240) seconds
            if int(parsed.analyzer_timeout) > 0 and int(
                    parsed.analyzer_timeout) < 240:
                json_settings[
                    environ["analyzer_env"]]["analyzer_timeout"] = int(
                        parsed.analyzer_timeout)
            if int(parsed.function_timeout) > 0 and int(
                    parsed.function_timeout) < 240:
                json_settings[
                    environ["analyzer_env"]]["function_timeout"] = int(
                        parsed.function_timeout)
            good_exec = True
        if good_exec:
            log_string(
                "Default timeout {}s for the task, and {}s for each logic".
                format(
                    json_settings[environ["analyzer_env"]]["analyzer_timeout"],
                    json_settings[environ["analyzer_env"]]
                    ["function_timeout"]), "Yellow")
        else:
            log_string("Parsing failed, something went wrong..", "Red")
            return

        log_string("Task {} (Started)".format(parsed.uuid), "Yellow")

        if parsed.file:
            # the task logger is cancelled even if analyze_file raised
            # (the with-block swallows the error)
            with ignore_excpetion(Exception):
                setup_task_logger(parsed.uuid)
                self.analyze_file(parsed)
            cancel_task_logger(parsed.uuid)
        else:
            log_string("File, Folder or Buffer is missing", "Red")

        log_string("Task {} (Finished)".format(parsed.uuid), "Green")
Beispiel #6
0
    def __init__(self, mode):
        '''
        Set up the analyzer components and, in --silent mode, poll the
        redis-backed task queue forever.

        mode: "--silent" for queue-driven operation; any other value
        only sets the (disabled) interactive prompt
        '''
        super(QBAnalyzer, self).__init__()
        self.analyzer = Analyzer()
        self.reporthandler = ReportHandler()
        self.do_cache_switches()

        if mode == "--silent":
            queue = QBQueue(
                "analyzer",
                json_settings[environ["analyzer_env"]]["redis_settings"])
            log_string("Waiting on tasks..", "Green")
            # poll once per second, forever
            while True:
                sleep(1)
                task = queue.get()
                if task != None:
                    self.do_analyze(task['data'], True)
                    log_string("Waiting on tasks..", "Green")
                    collect()  # force garbage collection between tasks
            # NOTE(review): unreachable - the loop above never breaks
            kill_process_and_subs()
        else:
            self.prompt = "(testing) "  #no more interactive
Beispiel #7
0
    def __init__(self):
        '''
        Build the detection data structure, make sure the detections
        directory exists, and import each *.py detection plugin,
        collecting its startanalyzing callable.
        '''
        self.datastruct = {
            "Detection": [],
            "_Detection": ["Count", "Offset", "Rule", "Parsed", "Match"]
        }

        detections_dir = path.abspath(
            path.join(path.dirname(__file__), 'detections'))
        if not detections_dir.endswith(path.sep):
            detections_dir += path.sep
        self.detections = detections_dir
        if not path.isdir(self.detections):
            mkdir(self.detections)
        self.modules = glob(self.detections + "*.py")
        self.imported = []
        for module_path in self.modules:
            try:
                plugin_name = path.basename(module_path)[:-3]
                mod = import_module(
                    ".qbdetect.detections.{}".format(plugin_name),
                    package="analyzer")
                self.imported.append(getattr(mod, "startanalyzing"))
            except Exception as e:
                print(e)
                log_string("Loading plugins failed", "Red")
Beispiel #8
0
    def __init__(self):
        '''
        initialize class, this has to pass

        Builds the detection data structure, ensures the detections
        directory exists, and imports every *.py plugin in it, collecting
        each plugin's startanalyzing callable. Import errors are
        suppressed per-plugin by ignore_excpetion.
        '''
        self.datastruct = {
            "Detection": [],
            "_Detection": ["Count", "Offset", "Rule", "Parsed", "Match"]
        }

        self.detections = path.abspath(
            path.join(path.dirname(__file__), 'detections'))
        if not self.detections.endswith(path.sep):
            self.detections = self.detections + path.sep
        if not path.isdir(self.detections):
            mkdir(self.detections)
        self.modules = glob(self.detections + "*.py")
        self.imported = []
        for _module in self.modules:
            with ignore_excpetion(Exception):
                mod = import_module(".qbdetect.detections.{}".format(
                    path.basename(_module)[:-3]),
                                    package="analyzer")
                self.imported.append(getattr(mod, "startanalyzing"))
        # bug fix: the completion message used to be logged once per
        # imported plugin inside the loop; log it once after loading
        log_string("Loading plugins completed", "Green")
Beispiel #9
0
    def analyze(self, parsed) -> dict:
        '''
        main analyze logic!

        Runs base file/encoding analysis, dispatches to exactly one
        format-specific parser based on signature checks, then applies
        the optional generic analyzers selected by the parsed switches.

        parsed: namespace of analysis switches (file, uuid, output, full, ...)
        Returns the accumulated results dict.
        '''

        data = {}

        log_string("Start analyzing {}".format(parsed.file), "Yellow")

        # base analysis: always runs, populates data for the checks below
        self.qbfile.analyze(data, parsed.uuid, parsed.file, parsed.output)
        self.qbencoding.analyze(data, parsed.file, parsed.unicode)

        # format dispatch: first matching signature wins
        if self.pdfparser.check_sig(data):
            self.pdfparser.analyze(data)
        elif self.windowspe.check_sig(data):
            self.windowspe.analyze(data)
            if parsed.behavior or parsed.full:
                self.qbbehavior.analyze(data, "winapi.json")
            if parsed.xref or parsed.full:
                self.qbd3generator.create_d3_ref(data)
        elif self.linuxelf.check_sig(data):
            self.linuxelf.analyze(data)
            if parsed.xref or parsed.full:
                self.qbd3generator.create_d3_ref(data)
            if parsed.behavior or parsed.full:
                self.qbbehavior.analyze(data, "linux.json")
        elif self.macho.check_sig_macho(data):
            self.macho.analyze_macho(data)
        elif self.macho.check_sig_dmg(data):
            self.macho.analyze_dmg(data)
        elif self.apkparser.check_sig_apk(data):
            self.apkparser.analyze_apk(data)
            if parsed.behavior or parsed.full:
                self.qbbehavior.analyze(data, "android.json")
        elif self.apkparser.check_sig_dex(data):
            self.apkparser.analyze_dex(data)
            if parsed.behavior or parsed.full:
                self.qbbehavior.analyze(data, "android.json")
        elif self.blackberry.check_sig(data):
            self.blackberry.analyze(data)
        elif self.emailparser.check_sig(data):
            self.emailparser.analyze(data, parsed)
        elif self.msgparser.check_sig(data):
            self.msgparser.analyze(data, parsed)
        elif self.readpackets.check_sig(data):
            self.readpackets.analyze(data)
            self.qbsnort.analyze(data)
            if parsed.dga or parsed.full:
                self.qbdga.analyze(data)
        elif self.officex.check_sig(data):
            self.officex.analyze(data)
        elif self.htmlparser.check_sig(data):
            self.htmlparser.analyze(data)
        elif self.oleparser.check_sig(data):
            self.oleparser.analyze(data)
        else:
            # unknown format: fall back to generic signature check and,
            # if requested, behavior analysis against all platforms
            self.qbfile.check_sig(data)
            if parsed.behavior or parsed.full:
                self.qbbehavior.analyze(data, "winapi.json")
                self.qbbehavior.analyze(data, "linux.json")
                self.qbbehavior.analyze(data, "android.json")
        # optional generic analyzers, each gated by its switch or --full
        if parsed.w_internal or parsed.w_original or parsed.w_hash or parsed.w_words or parsed.w_all or parsed.full:
            self.qbwhitelist.analyze(data, parsed)
        if parsed.language or parsed.full:
            self.qblanguage.analyze(data, parsed)
        if parsed.phishing or parsed.full:
            self.qbphising.analyze(data, parsed)
        if parsed.patterns or parsed.full:
            self.qbpatterns.analyze(data)
        if parsed.suspicious or parsed.full:
            self.qbsuspicious.analyze(data)
        if parsed.topurl or parsed.full:
            self.qburlsimilarity.analyze(data)
        if parsed.ocr or parsed.full:
            self.qbocrdetect.analyze(data)
        if parsed.enc or parsed.full:
            self.qbencryption.analyze(data)
        if parsed.cards or parsed.full:
            self.qbcreditcards.analyze(data)
        if parsed.creds or parsed.full:
            self.qbcreditcardsedentials.analyze(data)
        if parsed.secrets or parsed.full:
            self.qbsecrets.analyze(data)
        if parsed.plugins or parsed.full:
            self.loaddetections.checkwithdetections(data)
        if parsed.mitre or parsed.full:
            self.qbmitresearch.analyze(data)
        if parsed.yara or parsed.tags or parsed.full:
            self.yaraparser.checkwithyara(data, parsed, None)
        if parsed.visualize or parsed.full:
            self.qbd3generator.create_d3_artifacts(data)
        if parsed.flags or parsed.full:
            self.qbcountriesviz.get_flags_from_codes(data)
        if parsed.worldmap or parsed.full:
            self.qbcountriesviz.get_all_codes(data)
        return data
Beispiel #10
0
 def print_json(self, data):
     '''
     Pretty-print the data dict as sorted, indented JSON via the logger.
     '''
     rendered = jdumps(data, indent=4, sort_keys=True, cls=ComplexEncoder)
     log_string(rendered, "Yellow")
Beispiel #11
0
def ctrlhandler(signum, frame):
    '''
    Signal handler: move to a fresh line, log the shutdown message,
    then terminate this process and all of its children.
    '''
    stdout.write("\n")
    log_string("Terminating..", "Red")
    kill_process_and_subs()
Beispiel #12
0
 def save_output(self, data, renderedhtml, parsed):
     '''
     save output to file or database

     data: analysis results dict; nothing is saved when it is empty
     renderedhtml: rendered HTML report, or the "Error" sentinel from
                   check_output when no HTML was rendered
     parsed: namespace of output switches
     '''
     temp_id = None
     temp_es = None
     if len(data) > 0:
         if parsed.db_result:
             serialize_obj(data)
             # db insertion of the raw result is currently disabled
             #temp_id = add_item("tasks", "results", dataserialized)
             #if temp_id:
             #    log_string("JSON result added to db", "Yellow")
             #else:
             #    log_string("Unable to add JSON result to db", "Red")
         if parsed.db_dump_json:
             datajson = self.jsonmaker.dump_json_and_return(data)
             temp_id = add_item_fs(defaultdb["dbname"], defaultdb["reportscoll"], datajson, data["Details"]["Properties"]["md5"], data["Details"]["Properties"], parsed.uuid, "application/json", datetime.now())
             #temp_es = push_to_elastic(parsed.uuid, datajson["Details"])
             if temp_id:
                 log_string("JSON result dumped into db", "Yellow")
             else:
                 log_string("Unable to dump JSON result to db", "Red")
             # temp_es stays None while the elastic push is commented out,
             # so the failure branch below always fires in that case
             if temp_es:
                 log_string("JSON result dumped into elastic", "Yellow")
             else:
                 log_string("Unable to dump JSON result to elastic", "Red")
         if parsed.db_dump_html:
             # NOTE(review): datajson is unused in this branch; presumably
             # kept for dump_json_and_return's side effects - confirm
             datajson = self.jsonmaker.dump_json_and_return(data)
             temp_id = add_item_fs(defaultdb["dbname"], defaultdb["reportscoll"], renderedhtml, data["Details"]["Properties"]["md5"], data["Details"]["Properties"], parsed.uuid, "text/html", datetime.now())
             if temp_id:
                 log_string("HTML result dumped into db", "Yellow")
             else:
                 log_string("Unable to dump HTML result to db", "Red")
Beispiel #13
0
    def checkwithyara(self, data, parsed, check=""):
        '''
        check file with compiled yara detection and append results into list

        data: results dict; gets a "Yara" section with "Tags"/"Matches"
        parsed: namespace of switches (tags, yara, full)
        check: unused here  # NOTE(review): confirm callers rely on it
        '''
        data["Yara"] = deepcopy(self.datastruct)
        if parsed.full or parsed.tags:
            log_string("Finding yara tags", "Green")
            matches = self.rules_tags.match(data["Location"]["File"])
            list_of_matches = []
            if len(matches) > 0:
                for match in matches:
                    full_rule = "{}:{}".format(match.namespace, match.rule)
                    if full_rule not in list_of_matches:
                        list_of_matches.append(full_rule)
                        color = None
                        # NOTE(review): try/finally with no except - a
                        # namespace missing from default_colors raises
                        # KeyError AFTER the append below runs; confirm
                        # every namespace has a color or add an except
                        try:
                            color = default_colors[match.namespace]
                        finally:
                            data["Yara"]["Tags"].append({
                                "fullrule":
                                full_rule,
                                "namespace":
                                match.namespace,
                                "color":
                                color,
                                "rule":
                                match.rule,
                                "meta":
                                '\n'.join(
                                    "{}: {}".format(key, match.meta[key])
                                    for key in match.meta)
                            })

        if parsed.full or parsed.yara:
            matches = self.rules.match(data["Location"]["File"])
            log_string("Finding yara matches", "Green")
            if len(matches) > 0:
                for match in matches:
                    temp = {}
                    for _match in match.strings:
                        # NOTE(review): key is computed but never used
                        key = "{}:{}".format(match.namespace, match)
                        try:
                            pattern = _match[2].decode("utf-8",
                                                       errors="ignore")
                            ppattern = "None"
                        except:
                            # non-text pattern: render as \xNN escapes
                            pattern = ''.join('\\x{:02x}'.format(x)
                                              for x in _match[2])
                            ppattern = _match[2].decode("ascii", "replace")

                        if pattern in temp:
                            temp[pattern][0] += 1
                            temp[pattern][1].append(hex(_match[0]))
                        else:
                            # NOTE(review): first occurrence stores count 0,
                            # so "Count" is occurrences minus one - confirm
                            # downstream consumers expect that
                            if match.rule in self.yararulenamelist:
                                temp.update({
                                    pattern: [
                                        0, [hex(_match[0])],
                                        str(match), ppattern,
                                        self.yararulenamelist[match.rule]
                                    ]
                                })
                    for item in temp:
                        # "Patteren" (sic) is a consumed dict key; do not
                        # rename without updating every consumer
                        data["Yara"]["Matches"].append({
                            "Count":
                            temp[item][0],
                            "Offset":
                            " ".join(temp[item][1]),
                            "Rule":
                            temp[item][2],
                            "Patteren":
                            item,
                            "Parsed":
                            temp[item][3],
                            "Condition":
                            temp[item][4]
                        })