Example #1
    def export(self, ole, export_path):
        if not self.safe_makedir(export_path):
            return

        for stream in ole.listdir(streams=True, storages=True):
            try:
                stream_content = ole.openstream(stream).read()
            except Exception as e:
                self.log('warning', "Unable to open stream {0}: {1}".format(string_clean('/'.join(stream)), e))
                continue

            store_path = os.path.join(export_path, string_clean('-'.join(stream)))

            flash_objects = self.detect_flash(ole.openstream(stream).read())
            if len(flash_objects) > 0:
                self.log('info', "Saving Flash objects...")
                count = 1

                for header, flash_object in flash_objects:
                    self.print_swf_header_info(header)
                    save_path = '{0}-FLASH-Decompressed{1}'.format(store_path, count)
                    with open(save_path, 'wb') as flash_out:
                        flash_out.write(flash_object)

                    self.log('item', "Saved Decompressed Flash File to {0}".format(save_path))
                    count += 1

            with open(store_path, 'wb') as out:
                out.write(stream_content)

            self.log('info', "Saved stream to {0}".format(store_path))

        ole.close()
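
For reference, a minimal standalone sketch of the stream-walking pattern that export() wraps, assuming only the olefile package (the input path and output directory are hypothetical, and the Viper helpers safe_makedir/string_clean/detect_flash are left out):

    import os
    import olefile

    doc_path = "sample.doc"   # hypothetical input
    out_dir = "streams"       # hypothetical output directory
    os.makedirs(out_dir, exist_ok=True)

    if olefile.isOleFile(doc_path):
        ole = olefile.OleFileIO(doc_path)
        # listdir() yields each entry as a list of path components,
        # e.g. ['Macros', 'VBA', 'ThisDocument']
        for entry in ole.listdir(streams=True, storages=True):
            try:
                data = ole.openstream(entry).read()
            except (IOError, OSError):
                continue  # storages have no stream content to read
            # Join the components into one file name; the Viper code additionally
            # sanitizes it with string_clean() because stream names may contain
            # control characters such as \x05.
            with open(os.path.join(out_dir, "-".join(entry)), "wb") as out:
                out.write(data)
        ole.close()
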
Example #2
 def parse_vba(self, save_path):
     save = False
     vbaparser = VBA_Parser(__sessions__.current.file.path)
     # Check for Macros
     if not vbaparser.detect_vba_macros():
         self.log('error', "No Macro's Detected")
         return
     self.log('info', "Macro's Detected")
     #try:
     if True:
         an_results = {'AutoExec':[], 'Suspicious':[], 'IOC':[], 'Hex String':[], 'Base64 String':[], 'Dridex string':[], 'VBA string':[]}
         for (filename, stream_path, vba_filename, vba_code) in vbaparser.extract_macros():
             self.log('info', "Stream Details")
             self.log('item', "OLE Stream: {0}".format(string_clean(stream_path)))
             self.log('item', "VBA Filename: {0}".format(string_clean(vba_filename)))
             # Analyse the VBA Code
             vba_scanner = VBA_Scanner(vba_code)
             analysis = vba_scanner.scan(include_decoded_strings=True)
             for kw_type, keyword, description in analysis:
                 an_results[kw_type].append([string_clean_hex(keyword), description])
                 
             # Save the code to external File
             if save_path:
                 try:
                     with open(save_path, 'a') as out:
                         out.write(vba_code)
                     save = True
                 except Exception:
                     self.log('error', "Unable to write to {0}".format(save_path))
                     return
         # Print all Tables together
         self.log('info', "AutoRun Macros Found")
         self.log('table', dict(header=['Method', 'Description'], rows=an_results['AutoExec']))
         
         self.log('info', "Suspicious Keywords Found")
         self.log('table', dict(header=['KeyWord', 'Description'], rows=an_results['Suspicious']))
         
         self.log('info', "Possible IOC's")
         self.log('table', dict(header=['IOC', 'Type'], rows=an_results['IOC']))
         
         self.log('info', "Hex Strings")
         self.log('table', dict(header=['Decoded', 'Raw'], rows=an_results['Hex String']))
         
         self.log('info', "Base64 Strings")
         self.log('table', dict(header=['Decoded', 'Raw'], rows=an_results['Base64 String']))
         
         self.log('info', "Dridex String")
         self.log('table', dict(header=['Decoded', 'Raw'], rows=an_results['Dridex string']))
         
         self.log('info', "VBA string")
         self.log('table', dict(header=['Decoded', 'Raw'], rows=an_results['VBA string']))
         
         
         
         if save:
             self.log('success', "Writing VBA Code to {0}".format(save_path))
     #except:
         #self.log('error', "Unable to Process File")
     # Close the file
     vbaparser.close()
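
The same scan can be reproduced outside Viper with oletools alone; a rough sketch, assuming a hypothetical macros.doc in the working directory:

    from oletools.olevba import VBA_Parser, VBA_Scanner

    vbaparser = VBA_Parser("macros.doc")   # hypothetical sample
    if vbaparser.detect_vba_macros():
        for filename, stream_path, vba_filename, vba_code in vbaparser.extract_macros():
            print("Stream: {0}  Module: {1}".format(stream_path, vba_filename))
            # scan() yields (type, keyword, description) tuples; the type strings are
            # the same categories used as dictionary keys above ('AutoExec',
            # 'Suspicious', 'IOC', 'Hex String', 'Base64 String', 'Dridex string',
            # 'VBA string').
            for kw_type, keyword, description in VBA_Scanner(vba_code).scan(include_decoded_strings=True):
                print("  [{0}] {1}: {2}".format(kw_type, keyword, description))
    vbaparser.close()
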
Example #3
    def parse_vba(self, save_path):
        save = False
        vbaparser = VBA_Parser(__sessions__.current.file.path)
        # Check for Macros
        if not vbaparser.detect_vba_macros():
            self.log('error', "No Macro's Detected")
            return
        self.log('info', "Macro's Detected")
        # try:
        if True:
            an_results = {'AutoExec': [], 'Suspicious': [], 'IOC': [], 'Hex String': [], 'Base64 String': [], 'Dridex string': [], 'VBA string': []}
            for (filename, stream_path, vba_filename, vba_code) in vbaparser.extract_macros():
                self.log('info', "Stream Details")
                self.log('item', "OLE Stream: {0}".format(string_clean(stream_path)))
                self.log('item', "VBA Filename: {0}".format(string_clean(vba_filename)))
                # Analyse the VBA Code
                vba_scanner = VBA_Scanner(vba_code)
                analysis = vba_scanner.scan(include_decoded_strings=True)
                for kw_type, keyword, description in analysis:
                    an_results[kw_type].append([string_clean_hex(keyword), description])

                # Save the code to external File
                if save_path:
                    try:
                        with open(save_path, 'ab') as out:
                            out.write(vba_code)
                        save = True
                    except Exception:
                        self.log('error', "Unable to write to {0}".format(save_path))
                        return
            # Print all Tables together
            self.log('info', "AutoRun Macros Found")
            self.log('table', dict(header=['Method', 'Description'], rows=an_results['AutoExec']))

            self.log('info', "Suspicious Keywords Found")
            self.log('table', dict(header=['KeyWord', 'Description'], rows=an_results['Suspicious']))

            self.log('info', "Possible IOC's")
            self.log('table', dict(header=['IOC', 'Type'], rows=an_results['IOC']))

            self.log('info', "Hex Strings")
            self.log('table', dict(header=['Decoded', 'Raw'], rows=an_results['Hex String']))

            self.log('info', "Base64 Strings")
            self.log('table', dict(header=['Decoded', 'Raw'], rows=an_results['Base64 String']))

            self.log('info', "Dridex string")
            self.log('table', dict(header=['Decoded', 'Raw'], rows=an_results['Dridex string']))

            self.log('info', "VBA string")
            self.log('table', dict(header=['Decoded', 'Raw'], rows=an_results['VBA string']))

            if save:
                self.log('success', "Writing VBA Code to {0}".format(save_path))
        # except:
        # self.log('error', "Unable to Process File")
        # Close the file
        vbaparser.close()
Example #4
    def export(self, ole, export_path):
        if not os.path.exists(export_path):
            try:
                os.makedirs(export_path)
            except Exception as e:
                self.log(
                    'error', "Unable to create directory at {0}: {1}".format(
                        export_path, e))
                return
        else:
            if not os.path.isdir(export_path):
                self.log('error', "You need to specify a folder, not a file")
                return

        for stream in ole.listdir(streams=True, storages=True):
            try:
                stream_content = ole.openstream(stream).read()
            except Exception as e:
                self.log(
                    'warning', "Unable to open stream {0}: {1}".format(
                        string_clean('/'.join(stream)), e))
                continue

            store_path = os.path.join(export_path,
                                      string_clean('-'.join(stream)))

            flash_objects = self.detect_flash(ole.openstream(stream).read())
            if len(flash_objects) > 0:
                self.log('info', "Saving Flash objects...")
                count = 1

                for flash in flash_objects:
                    # If the SWF is compressed, save the SWF and the decompressed data separately.
                    if flash[2]:
                        save_path = '{0}-FLASH-Decompressed{1}'.format(
                            store_path, count)
                        with open(save_path, 'wb') as flash_out:
                            flash_out.write(flash[4])

                        self.log(
                            'item',
                            "Saved Decompressed Flash File to {0}".format(
                                save_path))

                    save_path = '{0}-FLASH-{1}'.format(store_path, count)
                    with open(save_path, 'wb') as flash_out:
                        flash_out.write(flash[3])

                    self.log('item',
                             "Saved Flash File to {0}".format(save_path))
                    count += 1

            with open(store_path, 'wb') as out:
                out.write(stream_content)

            self.log('info', "Saved stream to {0}".format(store_path))

        ole.close()
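
detect_flash() itself is not part of these examples, so the tuple layout (flash[2] compressed flag, flash[3] raw data, flash[4] decompressed data) has to be taken from the calling code. A rough sketch of the underlying idea, assuming only 'FWS'/'CWS' signatures and the standard 8-byte SWF header, with zlib for the compressed body:

    import zlib

    def find_swf(data):
        """Yield (offset, is_compressed, body) for SWF signatures found in a byte buffer."""
        for offset in range(max(len(data) - 7, 0)):
            magic = data[offset:offset + 3]
            if magic == b"FWS":                    # uncompressed SWF
                yield offset, False, data[offset:]
            elif magic == b"CWS":                  # zlib-compressed SWF
                try:
                    # The first 8 bytes (signature, version, length) are stored uncompressed;
                    # a fuller version would trim the result to the length field.
                    d = zlib.decompressobj()
                    body = data[offset:offset + 8] + d.decompress(data[offset + 8:])
                except zlib.error:
                    continue
                yield offset, True, body
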
Example #5
 def parse_vba(self, save_path):
     save = False
     vba = VBA_Parser(__sessions__.current.file.path)
     # Check for Macros
     if not vba.detect_vba_macros():
         self.log('error', "No Macro's Detected")
         return
     self.log('info', "Macro's Detected")
     try:
         run_rows = []
         word_rows = []
         pattern_rows = []
         for (filename, stream_path, vba_filename, vba_code) in vba.extract_macros():
             self.log('info', "Stream Details")
             self.log('item', "OLE Stream: {0}".format(string_clean(stream_path)))
             self.log('item', "VBA Filename: {0}".format(string_clean(vba_filename)))
             autoexec_keywords = detect_autoexec(vba_code)
             if autoexec_keywords:
                 for keyword, description in autoexec_keywords:
                     run_rows.append([keyword, description])
                 
             # Match Keyword Types
             suspicious_keywords = detect_suspicious(vba_code)
             if suspicious_keywords:
                 for keyword, description in suspicious_keywords:
                     word_rows.append([keyword, description])
                 
             # Match IOCs
             patterns = detect_patterns(vba_code)
             if patterns:
                 for pattern_type, value in patterns:
                     pattern_rows.append([pattern_type, value])
                 
             # Save the code to external File
             if save_path:
                 try:
                     with open(save_path, 'a') as out:
                         out.write(vba_code)
                     save = True
                 except Exception:
                     self.log('error', "Unable to write to {0}".format(save_path))
                     return
         # Print all Tables together
         self.log('info', "AutoRun Macros Found")
         self.log('table', dict(header=['KeyWord', 'Description'], rows=run_rows))
         self.log('info', "Suspicious Keywords Found")
         self.log('table', dict(header=['KeyWord', 'Description'], rows=word_rows))
         self.log('info', "Suspicious Patterns Found")
         self.log('table', dict(header=['Pattern', 'Value'], rows=pattern_rows))
         if save:
             self.log('success', "Writing VBA Code to {0}".format(save_path))
     except Exception:
         self.log('error', "Unable to Process File")
     # Close the file
     vba.close()
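
The three detect_* helpers used above come from oletools.olevba and can be called directly on any VBA source string; a minimal sketch with a hypothetical macro snippet:

    from oletools.olevba import detect_autoexec, detect_patterns, detect_suspicious

    vba_code = 'Sub AutoOpen()\n    Shell "cmd /c calc.exe"\nEnd Sub'   # hypothetical macro source

    for keyword, description in detect_autoexec(vba_code):
        print("AutoExec:   {0} - {1}".format(keyword, description))
    for keyword, description in detect_suspicious(vba_code):
        print("Suspicious: {0} - {1}".format(keyword, description))
    for pattern_type, value in detect_patterns(vba_code):
        print("IOC:        {0} - {1}".format(pattern_type, value))
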
Example #7
    def metadata(self, ole):
        meta = ole.get_metadata()
        for attribs in ['SUMMARY_ATTRIBS', 'DOCSUM_ATTRIBS']:
            self.log('info', "{0} Metadata".format(string_clean(attribs)))
            rows = []
            for key in getattr(meta, attribs):
                rows.append([key, string_clean(getattr(meta, key))])

            self.log('table', dict(header=['Name', 'Value'], rows=rows))

        ole.close()
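
The attribute names walked by getattr() come from olefile itself: get_metadata() returns an OleMetadata object whose SUMMARY_ATTRIBS and DOCSUM_ATTRIBS attributes list the available property names. A minimal standalone sketch (hypothetical path):

    import olefile

    ole = olefile.OleFileIO("sample.doc")   # hypothetical input
    meta = ole.get_metadata()
    for attribs in ("SUMMARY_ATTRIBS", "DOCSUM_ATTRIBS"):
        print("== {0} ==".format(attribs))
        for key in getattr(meta, attribs):
            print("{0}: {1!r}".format(key, getattr(meta, key)))
    ole.close()
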
Example #9
    def ole_meta(self, ole):
        """
        Get metadata on the Office document.
        """
        meta = ole.get_metadata()
        for attribs in ["SUMMARY_ATTRIBS", "DOCSUM_ATTRIBS"]:
            self.log("info", "{0} Metadata".format(string_clean(attribs)))
            rows = []
            for key in getattr(meta, attribs):
                rows.append([key, string_clean(getattr(meta, key))])

            self.log("table", dict(header=["Name", "Value"], rows=rows))

        ole.close()
Example #10
    def export(self, ole, export_path):
        if not os.path.exists(export_path):
            try:
                os.makedirs(export_path)
            except Exception as e:
                self.log('error', "Unable to create directory at {0}: {1}".format(export_path, e))
                return
        else:
            if not os.path.isdir(export_path):
                self.log('error', "You need to specify a folder, not a file")
                return

        for stream in ole.listdir(streams=True, storages=True):
            try:
                stream_content = ole.openstream(stream).read()
            except Exception as e:
                self.log('warning', "Unable to open stream {0}: {1}".format(string_clean('/'.join(stream)), e))
                continue

            store_path = os.path.join(export_path, string_clean('-'.join(stream)))

            flash_objects = self.detect_flash(ole.openstream(stream).read())
            if len(flash_objects) > 0:
                self.log('info', "Saving Flash objects...")
                count = 1

                for flash in flash_objects:
                    # If the SWF is compressed, save the SWF and the decompressed data separately.
                    if flash[2]:
                        save_path = '{0}-FLASH-Decompressed{1}'.format(store_path, count)
                        with open(save_path, 'wb') as flash_out:
                            flash_out.write(flash[4])

                        self.log('item', "Saved Decompressed Flash File to {0}".format(save_path))

                    save_path = '{0}-FLASH-{1}'.format(store_path, count)
                    with open(save_path, 'wb') as flash_out:
                        flash_out.write(flash[3])

                    self.log('item', "Saved Flash File to {0}".format(save_path))
                    count += 1

            with open(store_path, 'wb') as out:
                out.write(stream_content)

            self.log('info', "Saved stream to {0}".format(store_path))

        ole.close()
Example #11
    def metatimes(self, ole):
        rows = []
        rows.append([
            1,
            'Root',
            '',
            ole.root.getctime() if ole.root.getctime() else '',
            ole.root.getmtime() if ole.root.getmtime() else ''
        ])

        counter = 2
        for obj in ole.listdir(streams=True, storages=True):
            has_macro = ''
            try:
                if '\x00Attribu' in ole.openstream(obj).read():
                    has_macro = 'Yes'
            except Exception:
                pass

            rows.append([
                counter,
                string_clean('/'.join(obj)),
                has_macro,
                ole.getctime(obj) if ole.getctime(obj) else '',
                ole.getmtime(obj) if ole.getmtime(obj) else ''
            ])

            counter += 1

        self.log('info', "OLE Structure:")
        self.log('table', dict(header=['#', 'Object', 'Macro', 'Creation', 'Modified'], rows=rows))

        ole.close()
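
The '\x00Attribu' test above is a quick heuristic: VBA module streams usually carry the literal marker of their 'Attribute VB_...' header lines, so finding it in a stream suggests macro code without fully parsing the VBA project. A stripped-down standalone version of the same walk (hypothetical path; on Python 3 streams read as bytes, hence the b'' literal):

    import olefile

    ole = olefile.OleFileIO("sample.doc")   # hypothetical input
    for entry in ole.listdir(streams=True, storages=True):
        try:
            has_macro = b"\x00Attribu" in ole.openstream(entry).read()
        except (IOError, OSError):
            has_macro = False   # storages cannot be opened as streams
        print("/".join(entry),
              "macro" if has_macro else "",
              ole.getctime(entry) or "",
              ole.getmtime(entry) or "")
    ole.close()
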
Example #12
    def metatimes(self, ole):
        rows = []
        rows.append([
            1, 'Root', '',
            ole.root.getctime() if ole.root.getctime() else '',
            ole.root.getmtime() if ole.root.getmtime() else ''
        ])

        counter = 2
        for obj in ole.listdir(streams=True, storages=True):
            has_macro = ''
            try:
                if '\x00Attribu' in ole.openstream(obj).read():
                    has_macro = 'Yes'
            except Exception:
                pass

            rows.append([
                counter,
                string_clean('/'.join(obj)), has_macro,
                ole.getctime(obj) if ole.getctime(obj) else '',
                ole.getmtime(obj) if ole.getmtime(obj) else ''
            ])

            counter += 1

        self.log('info', "OLE Structure:")
        self.log(
            'table',
            dict(header=['#', 'Object', 'Macro', 'Creation', 'Modified'],
                 rows=rows))

        ole.close()
Example #13
    def ole_structure(self, ole):
        """
        """
        rows = []
        rows.append([
            1, "Root", "",
            ole.root.getctime() if ole.root.getctime() else "",
            ole.root.getmtime() if ole.root.getmtime() else ""
        ])

        counter = 2
        for obj in ole.listdir(streams=True, storages=True):
            has_macro = ""
            try:
                if "\x00Attribu" in ole.openstream(obj).read():
                    has_macro = "Yes"
            except Exception:
                pass

            rows.append([
                counter,
                string_clean("/".join(obj)), has_macro,
                ole.getctime(obj) if ole.getctime(obj) else "",
                ole.getmtime(obj) if ole.getmtime(obj) else ""
            ])

            counter += 1

        self.log("info", "OLE Structure:")
        self.log(
            "table",
            dict(header=["#", "Object", "Macro", "Creation", "Modified"],
                 rows=rows))

        ole.close()
Example #14
    def parse_message(self, message_folder):
        db = Database()
        email_header = os.path.join(message_folder, 'InternetHeaders.txt')
        email_body = os.path.join(message_folder, 'Message.txt')

        envelope = headers = email_text = ''
        if os.path.exists(email_header):
            envelope, headers = self.email_headers(email_header)
        if os.path.exists(email_body):
            email_text = open(email_body, 'rb').read()

        tags = 'pst, {0}'.format(message_folder)
        if os.path.exists(os.path.join(message_folder, 'Attachments')):
            for filename in os.listdir(
                    os.path.join(message_folder, 'Attachments')):
                if os.path.isfile(
                        os.path.join(message_folder, 'Attachments', filename)):
                    obj = File(
                        os.path.join(message_folder, 'Attachments', filename))
                    sha256 = hashlib.sha256(
                        open(
                            os.path.join(message_folder, 'Attachments',
                                         filename), 'rb').read()).hexdigest()
                    new_path = store_sample(obj)
                    if new_path:
                        # Add file to the database.
                        db.add(obj=obj, tags=tags)
                    # Add Email Details as a Note
                    # To handle duplicates we use multiple notes
                    headers_body = 'Envelope: \n{0}\nHeaders: \n{1}\n'.format(
                        envelope, headers)
                    db.add_note(sha256, 'Headers', headers_body)

                    # Add a note with email body
                    db.add_note(sha256, 'Email Body', string_clean(email_text))
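
Outside of Viper's Database/File wrappers, the attachment handling above reduces to walking the Attachments folder and hashing each file; a minimal sketch (the folder layout is the hypothetical PST-export layout used above):

    import hashlib
    import os

    attach_dir = os.path.join("message_folder", "Attachments")   # hypothetical layout
    if os.path.isdir(attach_dir):
        for filename in os.listdir(attach_dir):
            path = os.path.join(attach_dir, filename)
            if os.path.isfile(path):
                with open(path, "rb") as fh:
                    sha256 = hashlib.sha256(fh.read()).hexdigest()
                print(sha256, filename)
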
Example #15
 def parse_message(self, message_folder):
     db = Database()
     email_header = os.path.join(message_folder, 'InternetHeaders.txt')
     email_body = os.path.join(message_folder, 'Message.txt')
     attachments = []
     
     envelope = headers = email_text = ''
     if os.path.exists(email_header):
         envelope, headers = self.email_headers(email_header)
     if os.path.exists(email_body):
         email_text = open(email_body, 'rb').read()
     
     tags = 'pst, {0}'.format(message_folder)
     if os.path.exists(os.path.join(message_folder, 'Attachments')):
         for filename in os.listdir(os.path.join(message_folder, 'Attachments')):
             if os.path.isfile(os.path.join(message_folder, 'Attachments', filename)):
                 obj = File(os.path.join(message_folder, 'Attachments', filename))
                 sha256 = hashlib.sha256(open(os.path.join(message_folder, 'Attachments', filename), 'rb').read()).hexdigest()
                 new_path = store_sample(obj)
                 success = False
                 if new_path:
                     # Add file to the database.
                     success = db.add(obj=obj, tags=tags)
                 # Add Email Details as a Note
                 # To handle duplicates we use multiple notes
                 headers_body = 'Envelope: \n{0}\nHeaders: \n{1}\n'.format(envelope, headers)
                 db.add_note(sha256, 'Headers', headers_body)
                 
                 # Add a note with email body
                 db.add_note(sha256, 'Email Body', string_clean(email_text))
Example #16
    def ole_export(self, ole, export_path):
        if not self._safe_makedir(export_path):
            return

        for stream in ole.listdir(streams=True, storages=True):
            try:
                stream_content = ole.openstream(stream).read()
            except Exception as e:
                self.log(
                    "warning", "Unable to open stream {0}: {1}".format(
                        string_clean("/".join(stream)), e))
                continue

            store_path = os.path.join(export_path,
                                      string_clean("-".join(stream)))

            flash_objects = self.detect_flash(ole.openstream(stream).read())
            if len(flash_objects) > 0:
                self.log("info", "Saving Flash objects...")
                count = 1

                for header, flash_object in flash_objects:
                    self.print_swf_header_info(header)
                    save_path = "{0}-FLASH-Decompressed{1}".format(
                        store_path, count)
                    with open(save_path, "wb") as flash_out:
                        flash_out.write(flash_object)

                    self.log(
                        "item", "Saved Decompressed Flash File to {0}".format(
                            save_path))
                    count += 1

            with open(store_path, "wb") as out:
                out.write(stream_content)

            self.log("info", "Saved stream to {0}".format(store_path))

        ole.close()
Example #17
 def meta(self):
     url = 'https://www.metascan-online.com/en/scanresult/file/{0}'.format(__sessions__.current.file.md5)
     page = requests.get(url)
     reports = []
     if '<title>Error</title>' in page.text:
         self.log('info', "No reports for opened file")
         return
     pattern = 'scanResult = (.*)};'
     match = re.search(pattern, page.text)
     raw_results = match.group(0)[13:-1]
     json_results = json.loads(raw_results)
     unprocessed = json_results['scan_results']['scan_details']
     for vendor, results in unprocessed.iteritems():
         if results['scan_result_i'] == 1:
             reports.append([vendor, string_clean(results['threat_found']), results['def_time']])
     self.log('table', dict(header=['Vendor', 'Result', 'Time'], rows=reports))
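
The slice match.group(0)[13:-1] just strips the 13-character 'scanResult = ' prefix and the trailing ';' so that the embedded JavaScript object can be passed to json.loads; capture group 1 plus the closing brace gives the same result. A small sketch on a hypothetical page body:

    import json
    import re

    page_text = 'var scanResult = {"scan_results": {"scan_details": {}}};'   # hypothetical page body

    match = re.search(r'scanResult = (.*)};', page_text)
    if match:
        raw_results = match.group(1) + '}'   # group(1) stops just before the closing brace
        scan_details = json.loads(raw_results)['scan_results']['scan_details']
        print(scan_details)
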
Example #19
    def metatimes(self, ole):
        rows = []
        rows.append(
            [
                1,
                "Root",
                "",
                ole.root.getctime() if ole.root.getctime() else "",
                ole.root.getmtime() if ole.root.getmtime() else "",
            ]
        )

        counter = 2
        for obj in ole.listdir(streams=True, storages=True):
            has_macro = ""
            try:
                if "\x00Attribu" in ole.openstream(obj).read():
                    has_macro = "Yes"
            except Exception:
                pass

            rows.append(
                [
                    counter,
                    string_clean("/".join(obj)),
                    has_macro,
                    ole.getctime(obj) if ole.getctime(obj) else "",
                    ole.getmtime(obj) if ole.getmtime(obj) else "",
                ]
            )

            counter += 1

        print_info("OLE Structure:")
        print(table(header=["#", "Object", "Macro", "Creation", "Modified"], rows=rows))

        ole.close()
Example #20
File: pst.py Project: Rafiot/viper
 def email_headers(self, email_header):
     # If it came from outlook we may need to trim some lines
     new_mail = open(email_header).read()
     if 'Version 2.0\x0d\x0a' in new_mail:
         new_mail = new_mail.split('Version 2.0\x0d\x0a', 1)[1]
     # Leaving us an RFC compliant email to parse
     msg = email.message_from_string(new_mail)
     # Envelope
     envelope = [string_clean(msg.get("Subject")),
                 string_clean(msg.get("To")),
                 string_clean(msg.get("From")),
                 string_clean(msg.get("Cc")),
                 string_clean(msg.get("Bcc")),
                 string_clean(msg.get("Date"))
                 ]
     # headers
     headers = []
     for x in msg.keys():
         if x not in ['Subject', 'From', 'To', 'Date', 'Cc', 'Bcc']:
             headers.append([x, msg.get(x)])
     headers = sorted(headers, key=lambda entry: entry[0])
     return envelope, headers
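
The envelope/headers split above is plain use of the standard email module; a condensed standalone sketch (the header file name follows the hypothetical Outlook export layout used by these examples):

    import email

    with open("InternetHeaders.txt") as fh:   # hypothetical Outlook header export
        raw = fh.read()
    # Outlook prepends a banner ending in "Version 2.0\r\n" that has to be trimmed
    # before the remainder parses as RFC 822 headers.
    if 'Version 2.0\r\n' in raw:
        raw = raw.split('Version 2.0\r\n', 1)[1]

    msg = email.message_from_string(raw)
    envelope_fields = ['Subject', 'To', 'From', 'Cc', 'Bcc', 'Date']
    envelope = [(field, msg.get(field)) for field in envelope_fields]
    other_headers = sorted((k, msg.get(k)) for k in msg.keys() if k not in envelope_fields)
    print(envelope)
    print(other_headers)
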
Example #21
 def email_headers(self, email_header):
     # If it came from outlook we may need to trim some lines
     new_mail = open(email_header).read()
     if 'Version 2.0\x0d\x0a' in new_mail:
         new_mail = new_mail.split('Version 2.0\x0d\x0a', 1)[1]
     # Leaving us an RFC compliant email to parse
     msg = email.message_from_string(new_mail)
     # Envelope
     envelope = [
         string_clean(msg.get("Subject")),
         string_clean(msg.get("To")),
         string_clean(msg.get("From")),
         string_clean(msg.get("Cc")),
         string_clean(msg.get("Bcc")),
         string_clean(msg.get("Date"))
     ]
     # headers
     headers = []
     for x in msg.keys():
         if x not in ['Subject', 'From', 'To', 'Date', 'Cc', 'Bcc']:
             headers.append([x, msg.get(x)])
     headers = sorted(headers, key=lambda entry: entry[0])
     return envelope, headers
Example #22
    def parse_vba(self, save_path):
        """
        Parse VBA scripts.
        """
        save = False
        vbaparser = VBA_Parser(__sessions__.current.file.path)
        # Check for Macros
        if not vbaparser.detect_vba_macros():
            self.log("error", "No macros detected")
            return
        self.log("info", "Macros detected")
        # try:
        if True:
            an_results = {
                "AutoExec": [],
                "Suspicious": [],
                "IOC": [],
                "Hex String": [],
                "Base64 String": [],
                "Dridex string": [],
                "VBA string": []
            }
            for (filename, stream_path, vba_filename,
                 vba_code) in vbaparser.extract_macros():
                self.log("info", "Stream Details")
                self.log("item",
                         "OLE Stream: {0}".format(string_clean(stream_path)))
                self.log(
                    "item",
                    "VBA Filename: {0}".format(string_clean(vba_filename)))
                # Analyse the VBA Code
                vba_scanner = VBA_Scanner(vba_code)
                analysis = vba_scanner.scan(include_decoded_strings=True)
                for kw_type, keyword, description in analysis:
                    an_results[kw_type].append(
                        [string_clean_hex(keyword), description])

                # Save the code to external File
                if save_path:
                    try:
                        with open(save_path, "a") as out:
                            out.write(vba_code)
                        save = True
                    except Exception as e:
                        self.log(
                            "error",
                            "Unable to write to {0}: {1}".format(save_path, e))
                        return

            # Print all tables together
            if an_results["AutoExec"]:
                self.log("info", "Autorun macros found")
                self.log(
                    "table",
                    dict(header=["Method", "Description"],
                         rows=an_results["AutoExec"]))

            if an_results["Suspicious"]:
                self.log("info", "Suspicious keywords found")
                self.log(
                    "table",
                    dict(header=["Keyword", "Description"],
                         rows=an_results["Suspicious"]))

            if an_results["IOC"]:
                self.log("info", "Possible IOCs")
                self.log("table",
                         dict(header=["IOC", "Type"], rows=an_results["IOC"]))

            if an_results["Hex String"]:
                self.log("info", "Hex strings")
                self.log(
                    "table",
                    dict(header=["Decoded", "Raw"],
                         rows=an_results["Hex String"]))

            if an_results["Base64 String"]:
                self.log("info", "Base64 strings")
                self.log(
                    "table",
                    dict(header=["Decoded", "Raw"],
                         rows=an_results["Base64 String"]))

            if an_results["Dridex string"]:
                self.log("info", "Dridex strings")
                self.log(
                    "table",
                    dict(header=["Decoded", "Raw"],
                         rows=an_results["Dridex string"]))

            if an_results["VBA string"]:
                self.log("info", "VBA strings")
                self.log(
                    "table",
                    dict(header=["Decoded", "Raw"],
                         rows=an_results["VBA string"]))

            if save:
                self.log("success",
                         "Writing VBA Code to {0}".format(save_path))

        # Close the file
        vbaparser.close()