Example #1
    def test_Filehitcount1(self):
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__, FilePath='C:\Temp', FileName='test123.exe')
            add_entry(DB, "TestHost01", entry_fields)
            entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__, FilePath='C:\Temp', FileName='test1234.exe')
            add_entry(DB, "TestHost01", entry_fields)
            entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__, FilePath='C:\\test123.exe', FileName='nohit.exe')
            add_entry(DB, "TestHost01", entry_fields)

            # Get a temp file name for the list of files to search for
            temp_file = tempfile.NamedTemporaryFile(suffix='.db', prefix='testCase', dir=tempfile.gettempdir())
            temp_file.close()
            with open(temp_file.name, 'w') as fh:
                fh.write('test123.exe')

            try:
                ret = main([self.testset1, "filehitcount", temp_file.name])
            except Exception as e:
                print traceback.format_exc()
                self.fail(e.message + "\n" + traceback.format_exc())

            # Remove temp file
            os.remove(temp_file.name)

            num_hits = len(ret)
            self.assertEquals(num_hits, 2, sys._getframe().f_code.co_name)
            self.assertEquals(ret[1][1][1][0], 'test123.exe', "test_Filehitcount1 failed!")
            self.assertEquals(int(ret[1][1][1][1]), 1, "test_Filehitcount1 failed!")
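The test reserves a scratch path by creating and immediately closing a NamedTemporaryFile, then writes the list of file names to look up into that path before handing it to main(). A minimal standalone sketch of the same pattern, using delete=False so the reserved name survives the close (paths and contents here are placeholders):

    import os
    import tempfile

    # Reserve a temp path; delete=False keeps the file on disk after close.
    tmp = tempfile.NamedTemporaryFile(suffix='.db', prefix='testCase',
                                      dir=tempfile.gettempdir(), delete=False)
    tmp.close()

    # Write the search terms (one per line) to the reserved path.
    with open(tmp.name, 'w') as fh:
        fh.write('test123.exe')

    # ... hand tmp.name to whatever consumes the file ...

    # Clean up once done.
    os.remove(tmp.name)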
Example #2
    def test_AppCompat_LiteralSearch_Suppressed(self):
        rndFileName = ''.join(
            random.choice(string.ascii_uppercase) for _ in range(15))
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            # Add 10 entries
            for i in xrange(0, 10):
                entry_fields = settings.EntriesFields(
                    EntryType=settings.__APPCOMPAT__,
                    FilePath='C:\Temp',
                    FileName=rndFileName,
                    Size=i,
                    ExecFlag='True')
                add_entry(DB, "TestHost01", entry_fields)

            # Add 10 entries which will be deduped to 1 on search
            for i in xrange(0, 10):
                entry_fields = settings.EntriesFields(
                    EntryType=settings.__APPCOMPAT__,
                    FilePath='C:\Temp',
                    FileName=rndFileName,
                    Size=1000,
                    ExecFlag='True')
                add_entry(DB, "TestHost01", entry_fields)

        # Get a temp file name for the output
        with tempfile.NamedTemporaryFile(
                suffix='.txt', prefix='Output',
                dir=tempfile.gettempdir()) as temp_file:
            # Search
            (num_hits, num_hits_suppressed, results) = main([
                "-o", temp_file.name, self.testset1, "search", "-F",
                rndFileName
            ])
            # Check we got as many hits as we expect
            self.assertTrue(
                num_hits == 10 + 10,
                sys._getframe().f_code.co_name + " num_hits: %d - %s" %
                (num_hits, self.testset1))
            # Check suppression worked as expected
            self.assertTrue(
                num_hits_suppressed == 9,
                sys._getframe().f_code.co_name + " num_hits: %d" % num_hits)
            # Check output has the expected result
            self.assertEquals(
                num_hits - num_hits_suppressed,
                self.count_lines_regex(temp_file.name, rndFileName),
                sys._getframe().f_code.co_name +
                " Output regex count doesn't match num_hits!")
Example #3
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        rowNumber = 0
        rowValid = True
        file_object = loadFile(file_fullpath)
        csvdata = file_object.read().splitlines()[1:]
        file_object.close()

        data = csv.reader(csvdata, dialect='IngestDialect1')
        for row in data:
            for field in row:
                if b'\x00' in field:
                    settings.logger.warning(
                        "NULL byte found, ignoring bad shimcache parse: %s" %
                        field)
                    rowValid = False
            if rowValid:
                path, filename = ntpath.split(row[2])
                namedrow = settings.EntriesFields(
                    HostID=hostID,
                    EntryType=settings.__APPCOMPAT__,
                    RowNumber=rowNumber,
                    LastModified=unicode(row[0]),
                    LastUpdate=unicode(row[1]),
                    FilePath=unicode(path),
                    FileName=unicode(filename),
                    Size=unicode(row[3]),
                    ExecFlag=str(row[4]),
                    InstanceID=instanceID)
                rowsData.append(namedrow)
                rowNumber += 1
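csv.reader() is pointed at a dialect named 'IngestDialect1', which the project registers elsewhere; an unregistered dialect name makes csv.reader() raise. A rough standalone sketch of the same idea, with the dialect parameters assumed here to be a plain comma-separated layout:

    import csv

    # Register the dialect under the name the reader expects (layout assumed here).
    csv.register_dialect('IngestDialect1', delimiter=',', quotechar='"')

    sample = ['2010-01-01 10:00:00,N/A,C:\\Temp\\test123.exe,N/A,True']
    for row in csv.reader(sample, dialect='IngestDialect1'):
        print(row)  # ['2010-01-01 10:00:00', 'N/A', 'C:\\Temp\\test123.exe', 'N/A', 'True']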
Example #4
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        rowNumber = 0
        file_object = loadFile(file_fullpath)
        rows = file_object.read().splitlines()[1:]
        file_object.close()

        appCompatREGEX = re.compile(
            "((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ]((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ](.*)\\\([^\\\]*)[, ](N\/A|\d*)[, ](N\/A|True|False)"
        )
        assert (rows is not None)
        for r in rows:
            if b'\x00' in r:
                logger.debug(
                    "NULL byte found, ignoring bad shimcache parse: %s" % r)
                continue
            m = appCompatREGEX.match(r)
            if m:
                namedrow = settings.EntriesFields(
                    HostID=hostID,
                    EntryType=settings.__APPCOMPAT__,
                    RowNumber=rowNumber,
                    LastModified=unicode(m.group(1)),
                    LastUpdate=unicode(m.group(2)),
                    FilePath=unicode(m.group(3)),
                    FileName=unicode(m.group(4)),
                    Size=unicode(m.group(5)),
                    ExecFlag=str(m.group(6)),
                    InstanceID=instanceID)
                rowsData.append(namedrow)
                rowNumber += 1
            else:
                logger.warning("Entry regex failed for: %s - %s" % (hostID, r))
Example #5
    def test_AmCache_LiteralSearch(self):
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            for i in xrange(0, 10):
                entry_fields = settings.EntriesFields(
                    EntryType=settings.__AMCACHE__,
                    FilePath='C:\Temp',
                    FileName='calc.exe',
                    Size=i,
                    ExecFlag='True')
                add_entry(DB, "TestHost01", entry_fields)

        # Get a temp file name for the output
        with tempfile.NamedTemporaryFile(
                suffix='.txt', prefix='Output',
                dir=tempfile.gettempdir()) as temp_file:
            # Search
            (num_hits, num_hits_suppressed, results) = main([
                "-o", temp_file.name, self.testset1, "search", "-F", "calc.exe"
            ])
            # Check we got at least as many as we added into the DB
            self.assertTrue(
                num_hits >= 10,
                sys._getframe().f_code.co_name + " num_hits: %d" % num_hits)
            # Check output has the expected result
            self.assertEquals(
                num_hits, self.count_lines_regex(temp_file.name, "calc\.exe"),
                sys._getframe().f_code.co_name +
                " Output regex count doesn't match num_hits!")
Example #6
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        rowNumber = 0
        file_object = loadFile(file_fullpath)
        rows = _processAmCacheFile_StringIO(file_object)
        file_object.close()

        for r in rows:
            namedrow = settings.EntriesFields(
                HostID=hostID,
                EntryType=settings.__AMCACHE__,
                RowNumber=rowNumber,
                FilePath=(None if r.path is None else ntpath.dirname(r.path)),
                FileName=(None if r.path is None else ntpath.basename(r.path)),
                Size=r.size,
                ExecFlag='True',
                SHA1=(None if r.sha1 is None else r.sha1[4:]),
                FileDescription=r.file_description,
                FirstRun=r.first_run,
                Created=r.created_timestamp,
                Modified1=r.modified_timestamp,
                Modified2=r.modified_timestamp2,
                LinkerTS=r.linker_timestamp,
                Product=r.product,
                Company=r.company,
                PE_sizeofimage=r.pe_sizeofimage,
                Version_number=r.version_number,
                Version=r.version,
                Language=r.language,
                Header_hash=r.header_hash,
                PE_checksum=r.pe_checksum,
                SwitchBackContext=r.switchbackcontext,
                InstanceID=instanceID)
            rowsData.append(namedrow)
            rowNumber += 1
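ntpath is used rather than os.path so that Windows-style paths split correctly even when ingestion runs on a non-Windows host. A quick illustration:

    import ntpath

    path = 'C:\\Program Files\\Internet Explorer\\iexplore.exe'
    print(ntpath.dirname(path))   # C:\Program Files\Internet Explorer
    print(ntpath.basename(path))  # iexplore.exe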
Example #7
    def test_TStack(self):
        rndFileName = 'randomfilename.rnd'
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            # Add stuff to stack
            for i in xrange(0, 10):
                entry_fields = settings.EntriesFields(
                    EntryType=settings.__APPCOMPAT__,
                    FilePath='C:\Windows',
                    FileName=rndFileName,
                    Size=i,
                    LastModified='1000-01-01 00:00:0' + str(i))
                add_entry(DB, "TestHost01", entry_fields)

            # Run
            ret = main([self.testset1, "tstack", '1000-01-01', '1000-01-02'])

        # Check we found the right file
        self.assertEquals(ret[1][1][0], rndFileName, "test_TStack failed!")
        # Check expected in count
        self.assertEquals(int(ret[1][1][1]), 10, "test_TStack failed!")
        # Check expected out count
        self.assertEquals(int(ret[1][1][2]), 0, "test_TStack failed!")
Example #8
    def test_Stack(self):
        rndFileName = ''.join(
            random.choice(string.ascii_uppercase) for _ in range(15))
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            # Add stuff to stack
            for i in xrange(0, 10):
                entry_fields = settings.EntriesFields(
                    EntryType=settings.__APPCOMPAT__,
                    FilePath='C:\Windows',
                    FileName=rndFileName,
                    Size=i,
                    ExecFlag='True')
                add_entry(DB, "TestHost01", entry_fields)

            # Run
            ret = main([
                self.testset1, "stack", "FileName", "FilePath = 'c:\Windows'"
            ])

        # Check status count == db count
        count = int([i[1][0] for i in ret if rndFileName in i[1]][0])
        self.assertEquals(count, 10, "test_Stack failed!")
Example #9
    def test_AppCompat_IndexedSearch2(self):
        rndFileName = ''.join(
            random.choice(string.ascii_uppercase) for _ in range(20))
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            for i in xrange(0, 20):
                entry_fields = settings.EntriesFields(
                    EntryType=settings.__APPCOMPAT__,
                    FilePath='C:\Temp',
                    FileName=rndFileName,
                    Size=i,
                    ExecFlag='True')
                add_entry(DB, "TestHost01", entry_fields)

        # Get temp file names for the search output
        with tempfile.NamedTemporaryFile(
                suffix='.txt',
                prefix='test_AppCompat_IndexedSearch',
                dir=tempfile.gettempdir()) as temp_file_indexed:
            with tempfile.NamedTemporaryFile(
                    suffix='.txt',
                    prefix='test_AppCompat_NormalSearch',
                    dir=tempfile.gettempdir()) as temp_file_normal:
                # Indexed Search
                (num_hits, num_hits_suppressed, results) = main([
                    "-o", temp_file_indexed.name, self.testset1, "fsearch",
                    "FileName", "-F", rndFileName
                ])
                # Standard Search
                (num_hits2, num_hits_suppressed2, results2) = main([
                    "-o", temp_file_normal.name, self.testset1, "search", "-F",
                    "\\" + rndFileName
                ])
                # Check we got the same number of hits
                self.assertTrue(
                    num_hits == num_hits2,
                    sys._getframe().f_code.co_name +
                    " num_hits: %d" % num_hits)
                # Check output has the expected results
                self.assertEquals(
                    num_hits - num_hits_suppressed,
                    self.count_lines_regex(temp_file_indexed.name,
                                           rndFileName),
                    sys._getframe().f_code.co_name +
                    " Output regex count doesn't match num_hits!")
                # Check output has the expected results
                self.assertEquals(
                    num_hits2 - num_hits_suppressed2,
                    self.count_lines_regex(temp_file_normal.name, rndFileName),
                    sys._getframe().f_code.co_name +
                    " Output regex count doesn't match num_hits!")
                # Check standard and indexed search produced the same results
                self.assertTrue(
                    self.compare_output_files(temp_file_normal.name,
                                              temp_file_indexed.name),
                    "Results differ!")
Example #10
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        rowNumber = 0
        rowValid = True
        minSQLiteDTS = datetime(1, 1, 1, 0, 0, 0)
        maxSQLiteDTS = datetime(9999, 12, 31, 0, 0, 0)

        file_object = loadFile(file_fullpath)
        csvdata = file_object.read().splitlines()[1:]
        file_object.close()

        data = csv.reader(csvdata, dialect='IngestDialect1')
        for row in data:
            for field in row:
                if b'\x00' in field:
                    settings.logger.warning(
                        "NULL byte found, ignoring bad shimcache parse: %s" %
                        field)
                    rowValid = False

                try:
                    # Convert to timestamps:
                    if row[0] != 'N/A':
                        tmp_LastModified = datetime.strptime(
                            row[0], "%Y-%m-%d %H:%M:%S")
                    else:
                        tmp_LastModified = minSQLiteDTS
                    if row[1] != 'N/A':
                        tmp_LastUpdate = datetime.strptime(
                            row[1], "%Y-%m-%d %H:%M:%S")
                    else:
                        tmp_LastUpdate = minSQLiteDTS

                except Exception as e:
                    print("crap")
                    exc_type, exc_obj, exc_tb = sys.exc_info()
                    fname = os.path.split(
                        exc_tb.tb_frame.f_code.co_filename)[1]
                    logger.info(
                        "Exception processing row (%s): %s [%s / %s / %s]" %
                        (e.message, unicode(ntpath.split(
                            row[2])[0]), exc_type, fname, exc_tb.tb_lineno))

            if rowValid:
                path, filename = ntpath.split(row[2])
                namedrow = settings.EntriesFields(
                    HostID=hostID,
                    EntryType=settings.__APPCOMPAT__,
                    RowNumber=rowNumber,
                    LastModified=tmp_LastModified,
                    LastUpdate=tmp_LastUpdate,
                    FilePath=unicode(path),
                    FileName=unicode(filename),
                    Size=unicode(row[3]),
                    ExecFlag=str(row[4]),
                    InstanceID=instanceID)
                rowsData.append(namedrow)
                rowNumber += 1
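'N/A' timestamps are swapped for a sentinel datetime so the row still carries a value SQLite can store; minSQLiteDTS and maxSQLiteDTS mark the bounds of that range. The conversion step reduced to a standalone sketch:

    from datetime import datetime

    minSQLiteDTS = datetime(1, 1, 1, 0, 0, 0)

    def to_timestamp(value):
        # ShimCacheParser emits 'N/A' when no timestamp is available.
        if value == 'N/A':
            return minSQLiteDTS
        return datetime.strptime(value, "%Y-%m-%d %H:%M:%S")

    print(to_timestamp('N/A'))                  # 0001-01-01 00:00:00
    print(to_timestamp('2016-05-01 12:00:00'))  # 2016-05-01 12:00:00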
Example #11
    def test_Leven2(self):
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            # Add stuff
            entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__, FilePath='C:\Windows\System32',
                FileName='svchosts.exe')
            add_entry(DB, "TestHost01", entry_fields)

            # Run
            ret = main([self.testset1, "leven"])
            # Check we found the right file
            self.assertEquals('svchosts.exe' in ret[1][1][1], True, "test_Leven2 failed!")
Example #12
    def test_Leven(self):
        rndFileName = ''.join(random.choice(string.ascii_uppercase) for _ in range(15))
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            # Add stuff
            entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__, FilePath='C:\Windows\System32', FileName=rndFileName)
            add_entry(DB, "TestHost01", entry_fields)

            # Run
            leven_fileName = 'a' + rndFileName
            ret = main([self.testset1, "leven", leven_fileName])
            # Check we found the right file
            self.assertEquals(ret[1][1][1], "'"+rndFileName+"'", "test_Leven failed!")
Example #13
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        rowNumber = 0
        check_tags = ['LastModified', 'AppCompatPath', 'ExecutionFlag']
        try:
            xml_data = loadFile(file_fullpath)
            for event, element in etree.iterparse(xml_data, events=("end",)):
                skip_entry = False
                tag_dict = {}
                if element.tag == "AppCompatItemExtended":
                    self._processElement(element, tag_dict)

                    # Check we have everything we need and ignore entries with critical XML errors on them
                    for tag in check_tags:
                        if tag not in tag_dict or tag_dict[tag] is None:
                            if 'AppCompatPath' in tag_dict:
                                logger.warning("Malformed tag [%s: %s] in %s, entry: %s (skipping entry)" % (tag, tag_dict.get(tag), tag_dict['AppCompatPath'], file_fullpath))
                            else:
                                logger.warning(
                                    "Malformed tag [%s: %s] in %s, entry: Unknown (skipping entry)" % (tag, tag_dict.get(tag), file_fullpath))
                            skip_entry = True
                            break

                    # If the entry is valid do some housekeeping:
                    if not skip_entry:
                        if tag_dict['ExecutionFlag'] == '1':
                            tmpExecFlag = True
                        elif tag_dict['ExecutionFlag'] == '0':
                            tmpExecFlag = False
                        else:
                            tmpExecFlag = tag_dict['ExecutionFlag']
                        namedrow = settings.EntriesFields(HostID=hostID, EntryType=settings.__APPCOMPAT__,
                              RowNumber=rowNumber,
                              InstanceID=instanceID,
                              LastModified=(tag_dict['LastModified'].replace("T"," ").replace("Z","") if 'LastModified' in tag_dict else '0001-01-01 00:00:00'),
                              LastUpdate=(tag_dict['LastUpdate'].replace("T"," ").replace("Z","") if 'LastUpdate' in tag_dict else '0001-01-01 00:00:00'),
                              FileName=ntpath.basename(tag_dict['AppCompatPath']),
                              FilePath=ntpath.dirname(tag_dict['AppCompatPath']),
                              Size=(tag_dict['Size'] if 'Size' in tag_dict else 'N/A'),
                              ExecFlag=tmpExecFlag)
                        rowsData.append(namedrow)
                        rowNumber += 1
            else:
                pass
                element.clear()
            xml_data.close()
        except Exception as e:
            print e.message
            print traceback.format_exc()
            pass
Example #14
    def test_AppCompat_IndexedSearchFilePath(self):
        rndFileName = ''.join(
            random.choice(string.ascii_uppercase) for _ in range(20))
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            for i in xrange(0, 20):
                entry_fields = settings.EntriesFields(
                    EntryType=settings.__APPCOMPAT__,
                    FilePath='C:\\' + rndFileName,
                    FileName="calc.exe",
                    Size=i,
                    ExecFlag='True')
                add_entry(DB, "TestHost01", entry_fields)

        # Get a temp file name for the output
        with tempfile.NamedTemporaryFile(
                suffix='.txt',
                prefix='test_AppCompat_IndexedSearch',
                dir=tempfile.gettempdir()) as temp_file:
            # Search
            (num_hits, num_hits_suppressed, results) = main([
                "-o", temp_file.name, self.testset1, "fsearch", "FilePath",
                "-F", "C:\\" + rndFileName
            ])
            # Check we got exactly as many hits as we added into the DB
            self.assertTrue(
                num_hits == 20,
                sys._getframe().f_code.co_name + " num_hits: %d" % num_hits)
            # Check output has the expected result
            self.assertEquals(
                num_hits - num_hits_suppressed,
                self.count_lines_regex(temp_file.name, rndFileName),
                sys._getframe().f_code.co_name +
                " Output regex count doesn't match num_hits!")
Example #15
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        minSQLiteDTS = datetime(1, 1, 1, 0, 0, 0)
        maxSQLiteDTS = datetime(9999, 12, 31, 0, 0, 0)
        rowNumber = 0
        check_tags = ['LastModified', 'AppCompatPath']
        try:
            xml_data = loadFile(file_fullpath)
            for event, element in etree.iterparse(xml_data, events=("end", )):
                skip_entry = False
                tag_dict = {}
                if element.tag == "AppCompatItemExtended":
                    self._processElement(element, tag_dict)

                    # From time to time we get some entries with no real data on them for some unknown reason, skip for now
                    if 'AppCompatPath' in tag_dict:
                        if tag_dict['AppCompatPath'] == 'N/A':
                            logger.debug(
                                "ShimCache entry with no AppCompatPath (Sequence # %s) on %s. (skipping entry)"
                                % (tag_dict['Sequence'], file_fullpath))
                            break

                    # Check we have everything we need and ignore entries with critical XML errors on them
                    for tag in check_tags:
                        if tag not in tag_dict or tag_dict[tag] is None:
                            if tag not in tag_dict:
                                if 'AppCompatPath' in tag_dict:
                                    logger.warning(
                                        "Missing tag [%s] in %s, entry: %s (skipping entry)"
                                        % (tag, tag_dict['AppCompatPath'],
                                           file_fullpath))
                                else:
                                    logger.warning(
                                        "Malformed tag [%s] in %s, entry: Unknown (skipping entry)"
                                        % (tag, file_fullpath))
                                skip_entry = True
                                break
                            if tag_dict[tag] is None:
                                if 'AppCompatPath' in tag_dict:
                                    logger.warning(
                                        "Malformed tag [%s: %s] in %s, entry: %s (skipping entry)"
                                        % (tag, tag_dict[tag],
                                           tag_dict['AppCompatPath'],
                                           file_fullpath))
                                else:
                                    logger.warning(
                                        "Malformed tag [%s: %s] in %s, entry: Unknown (skipping entry)"
                                        % (tag, tag_dict[tag], file_fullpath))
                                skip_entry = True
                                break

                    # If the entry is valid do some housekeeping:
                    if not skip_entry:
                        if tag_dict['ExecutionFlag'] == '1':
                            tmpExecFlag = True
                        elif tag_dict['ExecutionFlag'] == '0':
                            tmpExecFlag = False
                        else:
                            tmpExecFlag = tag_dict['ExecutionFlag']

                        try:
                            # Convert TS to datetime format
                            if 'LastModified' in tag_dict:
                                tmp_LastModified = tag_dict[
                                    'LastModified'].replace("T", " ").replace(
                                        "Z", "")
                                if type(tmp_LastModified) is not datetime:
                                    tmp_LastModified = datetime.strptime(
                                        tmp_LastModified, "%Y-%m-%d %H:%M:%S")
                            else:
                                tmp_LastModified = minSQLiteDTS

                            if 'LastUpdate' in tag_dict:
                                tmp_LastUpdate = tag_dict[
                                    'LastUpdate'].replace("T", " ").replace(
                                        "Z", "")
                                if type(tmp_LastUpdate) is not datetime:
                                    tmp_LastUpdate = datetime.strptime(
                                        tmp_LastUpdate, "%Y-%m-%d %H:%M:%S")
                            else:
                                tmp_LastUpdate = minSQLiteDTS

                            namedrow = settings.EntriesFields(
                                HostID=hostID,
                                EntryType=settings.__APPCOMPAT__,
                                RowNumber=rowNumber,
                                InstanceID=instanceID,
                                LastModified=tmp_LastModified,
                                LastUpdate=tmp_LastUpdate,
                                FileName=ntpath.basename(
                                    tag_dict['AppCompatPath']),
                                FilePath=ntpath.dirname(
                                    tag_dict['AppCompatPath']),
                                Size=(tag_dict['Size']
                                      if 'Size' in tag_dict else 'N/A'),
                                ExecFlag=tmpExecFlag)
                            rowsData.append(namedrow)
                            rowNumber += 1
                        except Exception as e:
                            print("crap")
                            exc_type, exc_obj, exc_tb = sys.exc_info()
                            fname = os.path.split(
                                exc_tb.tb_frame.f_code.co_filename)[1]
                            logger.info(
                                "Exception processing row (%s): %s [%s / %s / %s]"
                                % (e.message, file_fullpath, exc_type, fname,
                                   exc_tb.tb_lineno))
            else:
                pass
                element.clear()
            xml_data.close()
        except Exception as e:
            print e.message
            print traceback.format_exc()
            pass
Example #16
class Appcompat_miracquisition(Ingest):
    ingest_type = "appcompat_miracquisition"
    file_name_filter = "(?:.*)(?:\/|\\\)(.*)-[A-Za-z0-9]{64}-\d{1,10}-\d{1,10}(?:_octet-stream.xml)$"

    def __init__(self):
        super(Appcompat_miracquisition, self).__init__()

    def getHostName(self, file_name_fullpath):
        host_name_from_file = super(Appcompat_miracquisition,
                                    self).getHostName(file_name_fullpath)
        file_object = loadFile(file_name_fullpath)
        regf_file = pyregf.file()
        regf_file.open_file_object(file_object, "r")
        # Get control set number
        tmp_key = regf_file.get_key_by_path(r'Select')
        if tmp_key is not None:
            controlset_number = tmp_key.get_value_by_name(
                'Current').get_data_as_integer()
            # Get host name
            tmp_key = regf_file.get_key_by_path(
                r'ControlSet00' + str(controlset_number) +
                '\Control\ComputerName\ComputerName')
            host_name = tmp_key.get_value_by_name(
                'ComputerName').get_data_as_string()
            # Check Mir host name matches the one from the hive, trust but verify :)
            if host_name != host_name_from_file:
                logger.warning(
                    "Host name mismatch! (%s != %s): %s" %
                    (host_name, host_name_from_file, file_name_fullpath))
        else:
            # todo: Close everything down elegantly
            logger.error(
                "Attempting to process non-SYSTEM hive with appcompat_raw_hive plugin: %s"
                % file_name_fullpath)
            raise (Exception(
                'Attempting to process non-SYSTEM hive with appcompat_raw_hive plugin'
            ))

        # Need to close these or the memory will never get freed:
        regf_file.close()
        del regf_file
        file_object.close()
        del file_object
        return host_name

    def checkMagic(self, file_name_fullpath):
        magic_ok = False
        # Quick and dirty check
        file_object = loadFile(file_name_fullpath)
        tmp = struct.unpack('4s', file_object.read(4))
        if tmp[0] == "regf":
            # Perform a deeper check using pyregf
            regf_file = pyregf.file()
            regf_file.open_file_object(file_object, "r")
            magic_key = regf_file.get_key_by_path(r'Select')
            regf_file.close()
            del regf_file
            if magic_key is not None:
                magic_ok = True

            # Need to close these or the memory will never get freed:
            file_object.close()
            del file_object

        return magic_ok

    def calculateID(self, file_name_fullpath):
        instanceID = 0
        file_object = loadFile(file_name_fullpath)
        regf_file = pyregf.file()
        regf_file.open_file_object(file_object, "r")

        # Search for key containing ShimCache entries on all control sets
        # Use last modification time of the last modified one as instanceID
        root = regf_file.get_root_key()
        num_keys = root.get_number_of_sub_keys()
        for i in xrange(0, num_keys):
            tmp_key = root.get_sub_key(i)
            if "controlset" in tmp_key.get_name().lower():
                session_man_key = regf_file.get_key_by_path(
                    "%s\Control\Session Manager" % tmp_key.get_name())
                num_keys = session_man_key.get_number_of_sub_keys()
                for i in xrange(0, num_keys):
                    tmp_key = session_man_key.get_sub_key(i)
                    if "appcompatibility" in tmp_key.get_name().lower(
                    ) or "appcompatcache" in tmp_key.get_name().lower():
                        last_write_time = tmp_key.get_last_written_time_as_integer(
                        )
                        if last_write_time > instanceID:
                            instanceID = last_write_time
                        break

        # Need to close these or the memory will never get freed:
        regf_file.close()
        del regf_file
        file_object.close()
        del file_object
        return instanceID

    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        rowNumber = 0
        entries = None
        # Process file using ShimCacheParser
        try:
            entries = read_from_hive(file_fullpath, True)
            if not entries:
                logger.warning("[ShimCacheParser] found no entries for %s" %
                               file_fullpath)
                return False
            else:
                rows = write_it(entries, "StringIO")[1:]
        except IOError, err:
            logger.error("[ShimCacheParser] Error opening binary file: %s" %
                         str(err))
            return False

        # Process records
        appCompatREGEX = re.compile(
            "((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ]((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ](.*)\\\([^\\\]*)[, ](N\/A|\d*)[, ](N\/A|True|False)"
        )
        assert (rows is not None)
        for r in rows:
            if b'\x00' in r:
                logger.debug(
                    "NULL byte found, skipping bad shimcache parse: %s" % r)
                continue
            m = appCompatREGEX.match(r)
            if m:
                namedrow = settings.EntriesFields(
                    HostID=hostID,
                    EntryType=settings.__APPCOMPAT__,
                    RowNumber=rowNumber,
                    LastModified=unicode(m.group(1)),
                    LastUpdate=unicode(m.group(2)),
                    FilePath=unicode(m.group(3)),
                    FileName=unicode(m.group(4)),
                    Size=unicode(m.group(5)),
                    ExecFlag=str(m.group(6)),
                    InstanceID=instanceID)
                rowsData.append(namedrow)
                rowNumber += 1
            else:
                logger.warning("Entry regex failed for: %s - %s" % (hostID, r))
Example #17
class Appcompat_mirregistryaudit(Ingest):
    ingest_type = "appcompat_mirregistryaudit"
    file_name_filter = "(?:.*)(?:\/|\\\)(.*)-[A-Za-z0-9]{64}-\d{1,10}-\d{1,10}(?:_w32registry.xml)$"

    def __init__(self):
        super(Appcompat_mirregistryaudit, self).__init__()

    def calculateID(self, file_name_fullpath):
        instanceID = datetime.min
        tmp_instanceID = None

        try:
            file_object = loadFile(file_name_fullpath)
            root = ET.parse(file_object).getroot()
            file_object.close()
            for reg_key in root.findall('RegistryItem'):
                tmp_reg_key = reg_key.find('Modified')
                if tmp_reg_key is not None:
                    reg_modified = tmp_reg_key.text
                    try:
                        tmp_instanceID = datetime.strptime(reg_modified, "%Y-%m-%dT%H:%M:%SZ")
                    except ValueError as e:
                        tmp_instanceID = datetime.max
                        logger.warning("Invalid reg_modified date found!: %s (%s)" % (reg_modified, file_name_fullpath))
                    if instanceID < tmp_instanceID:
                        instanceID = tmp_instanceID
                else:
                    logger.warning("Found RegistryItem with no Modified date (Mir bug?): %s" % file_name_fullpath)
        except Exception:
            logger.exception("Error on calculateID for: %s" % file_name_fullpath)

        # If we found no Modified date in any of the RegistryItems we go with plan B (but most probably ShimCacheParser will fail to parse anyway)
        if instanceID == datetime.min:
            file_object = loadFile(file_name_fullpath)
            content = file_object.read()
            instanceID = hashlib.md5(content).hexdigest()
            file_object.close()

        return instanceID


    def checkMagic(self, file_name_fullpath):
        # As long as we find one AppcompatCache key we're declaring it good for us
        file_object = loadFile(file_name_fullpath)
        try:
            root = ET.parse(file_object).getroot()
            for reg_key in root.findall('RegistryItem'):
                if reg_key.find('ValueName').text == "AppCompatCache":
                    return True
        except Exception:
            logger.warning("[%s] Failed to parse XML for: %s" % (self.ingest_type, file_name_fullpath))
            #traceback.print_exc(file=sys.stdout)
        finally:
            file_object.close()

        return False


    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        # Returns data in rowsData
        rowNumber = 0
        # Process file using ShimCacheParser
        try:
            xml_data = loadFile(file_fullpath)
            (error, entries) = read_mir(xml_data, True)
            xml_data.close()

            assert(not error)
            if not entries:
                logger.warning("[ShimCacheParser] found no entries for %s" % file_fullpath)
                return False
            else:
                rows = write_it(entries, "StringIO")[1:]
        except IOError, err:
            logger.error("[ShimCacheParser] Error opening binary file: %s" % str(err))
            return False

        # Process records
        appCompatREGEX = re.compile(
            "((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ]((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ](.*)\\\([^\\\]*)[, ](N\/A|\d*)[, ](N\/A|True|False)")
        assert (rows is not None)
        for r in rows:
            if b'\x00' in r:
                logger.debug("NULL byte found, skipping bad shimcache parse: %s" % r)
                continue
            m = appCompatREGEX.match(r)
            if m:
                namedrow = settings.EntriesFields(HostID=hostID, EntryType=settings.__APPCOMPAT__, RowNumber=rowNumber,
                                                  LastModified=unicode(m.group(1)), LastUpdate=unicode(m.group(2)),
                                                  FilePath=unicode(m.group(3)),
                                                  FileName=unicode(m.group(4)), Size=unicode(m.group(5)),
                                                  ExecFlag=str(m.group(6)), InstanceID=instanceID)
                rowsData.append(namedrow)
                rowNumber += 1
            else:
                logger.warning("Entry regex failed for: %s - %s" % (hostID, r))
Example #18
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        rowNumber = 0
        check_tags = ['AmCacheLastModified2']
        try:
            xml_data = loadFile(file_fullpath)
            for event, element in etree.iterparse(xml_data, events=("end",)):
                skip_entry = False
                tag_dict = {}
                if element.tag == "AmCacheItem":
                    self._processElement(element, tag_dict)

                    # Check we have everything we need and ignore entries with critical XML errors on them
                    for tag in check_tags:
                        if tag not in tag_dict:
                            if 'AmCacheFilePath' in tag_dict:
                                logger.warning("Missing tag [%s] in %s, entry: %s (skipping entry)" % (tag, tag_dict['AmCacheFilePath'], file_fullpath))
                            else:
                                logger.warning("Malformed tag [%s] in %s, entry: Unknown (skipping entry)" % (tag, file_fullpath))
                            skip_entry = True
                            break
                        if tag_dict[tag] is None:
                            if 'AmCacheFilePath' in tag_dict:
                                logger.warning("Malformed tag [%s: %s] in %s, entry: %s (skipping entry)" % (tag, tag_dict[tag], tag_dict['AmCacheFilePath'], file_fullpath))
                            else:
                                logger.warning("Malformed tag [%s: %s] in %s, entry: Unknown (skipping entry)" % (tag, tag_dict[tag], file_fullpath))
                            skip_entry = True
                            break

                    # Some entries in AmCache do not refer to files per se (like installed program entries)
                    # We don't have much use for them right now but let's keep the data there until I figure what to do with them
                    if 'AmCacheFilePath' not in tag_dict:
                        if 'ProgramName' in tag_dict:
                            tag_dict['AmCacheFilePath'] = tag_dict['ProgramName']
                        else:
                            # If we have no thing we can use here we skip the entry for now
                            # todo: pretty-print the tag_dict to the log file
                            logger.warning("AmCache entry with no AppCompatPath or ProgramName. (skipping entry)")
                            break

                    # If the entry is valid do some housekeeping:
                    if not skip_entry:
                        if 'ExecutionFlag' in tag_dict:
                            if tag_dict['ExecutionFlag'] == '1':
                                tmpExecFlag = True
                            elif tag_dict['ExecutionFlag'] == '0':
                                tmpExecFlag = False
                            else: tmpExecFlag = tag_dict['ExecutionFlag']
                        else:
                            # todo: Not all OS's have exec flag. Need to change the schema to reflect those cases!
                            tmpExecFlag = False

                        namedrow = settings.EntriesFields(HostID=hostID, EntryType=settings.__APPCOMPAT__,
                          RowNumber=rowNumber,
                          InstanceID=instanceID,
                          LastModified=(tag_dict['LastModified'].replace("T"," ").replace("Z","") if 'LastModified' in tag_dict else '0001-01-01 00:00:00'),
                          LastUpdate=(tag_dict['LastUpdate'].replace("T"," ").replace("Z","") if 'LastUpdate' in tag_dict else '0001-01-01 00:00:00'),
                          FileName=ntpath.basename(tag_dict['AmCacheFilePath']),
                          FilePath=ntpath.dirname(tag_dict['AmCacheFilePath']),
                          Size=(tag_dict['Size'] if 'Size' in tag_dict else 'N/A'),
                          ExecFlag=tmpExecFlag)
                        rowsData.append(namedrow)
                        rowNumber += 1
            else:
                pass
                element.clear()
            xml_data.close()
        except Exception as e:
            print e.message
            print traceback.format_exc()
            pass
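These XML ingest plugins stream the audit with etree.iterparse() rather than loading the whole document; clearing elements once they have been processed keeps memory flat on large files. The skeleton of that streaming loop, with the project-specific field handling stripped out (tag name and file are placeholders):

    from xml.etree import ElementTree as etree

    def iter_items(xml_path, wanted_tag):
        # Stream the XML and yield a {tag: text} dict per matching element.
        for event, element in etree.iterparse(xml_path, events=("end",)):
            if element.tag == wanted_tag:
                yield dict((child.tag, child.text) for child in element)
            element.clear()  # release the element once processed

    # for item in iter_items('audit.xml', 'AmCacheItem'):
    #     print(item)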
Example #19
    def test_TcorrTest_prog1(self):
        with appDB.DBClass(self.testset1, settings.__version__) as DB:
            DB.appInitDB()
            conn = DB.appConnectDB()

            # TestHost01
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='AAA.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost01", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='BBB.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost01", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='CCC.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost01", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='DDD.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost01", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='EEE.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost01", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='FFF.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost01", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='GGG.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost01", entry_fields)

            # TestHost02
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='AAA.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost02", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='BBB.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost02", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='CCC.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost02", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='DDD.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost02", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='EEE.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost02", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='FFF.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost02", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='GGG.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost02", entry_fields)

            try:
                directCorrelationData = main(
                    [self.testset1, "tcorr", "DDD.exe", "-w 1"])
            except Exception as e:
                print traceback.format_exc()
                self.fail(e.message + "\n" + traceback.format_exc())

            # Check Names
            self.assertEquals(directCorrelationData[1][3], "CCC.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[0][3], "EEE.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            # Check Before
            self.assertEquals(directCorrelationData[1][6], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[0][6], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            # Check After
            self.assertEquals(directCorrelationData[1][7], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[0][7], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            # Check InvBond
            self.assertEquals(directCorrelationData[1][9], "True",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[0][9], "True",
                              "test_TcorrTest_prog1 - Name failed!")
            # Check Total_Count
            self.assertEquals(directCorrelationData[1][10], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[0][10], 2,
                              "test_TcorrTest_prog1 - Name failed!")

            try:
                directCorrelationData = main(
                    [self.testset1, "tcorr", "DDD.exe", "-w 2"])
            except Exception as e:
                print traceback.format_exc()
                self.fail(e.message + "\n" + traceback.format_exc())

            # Check Names
            self.assertEquals(directCorrelationData[0][3], "CCC.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][3], "EEE.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[2][3], "BBB.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[3][3], "FFF.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            # Check Before
            self.assertEquals(directCorrelationData[0][6], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][6], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[2][6], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[3][6], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            # Check After
            self.assertEquals(directCorrelationData[0][7], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][7], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[2][7], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[3][7], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            # Check InvBond
            self.assertEquals(directCorrelationData[0][9], "True",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][9], "True",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[2][9], "True",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[3][9], "True",
                              "test_TcorrTest_prog1 - Name failed!")
            # Check Total_Count
            self.assertEquals(directCorrelationData[0][10], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][10], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[2][10], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[3][10], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            # Check Weight
            self.assertTrue(
                directCorrelationData[0][8] > directCorrelationData[2][8],
                "test_TcorrTest_prog1 - Name failed!")
            self.assertTrue(
                directCorrelationData[0][8] > directCorrelationData[3][8],
                "test_TcorrTest_prog1 - Name failed!")
            self.assertTrue(
                directCorrelationData[1][8] > directCorrelationData[2][8],
                "test_TcorrTest_prog1 - Name failed!")
            self.assertTrue(
                directCorrelationData[1][8] > directCorrelationData[3][8],
                "test_TcorrTest_prog1 - Name failed!")
            self.assertTrue(
                directCorrelationData[0][8] == directCorrelationData[1][8],
                "test_TcorrTest_prog1 - Name failed!")
            self.assertTrue(
                directCorrelationData[2][8] == directCorrelationData[3][8],
                "test_TcorrTest_prog1 - Name failed!")

            try:
                directCorrelationData = main(
                    [self.testset1, "tcorr", "DDD.exe", "-w 3"])
            except Exception as e:
                print traceback.format_exc()
                self.fail(e.message + "\n" + traceback.format_exc())

            # Check Names
            self.assertEquals(directCorrelationData[0][3], "CCC.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][3], "EEE.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[2][3], "BBB.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[3][3], "FFF.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[4][3], "AAA.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[5][3], "GGG.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            # Check Before
            self.assertEquals(directCorrelationData[0][6], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][6], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[2][6], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[3][6], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[4][6], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[5][6], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            # Check After
            self.assertEquals(directCorrelationData[0][7], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][7], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[2][7], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[3][7], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[4][7], 2,
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[5][7], 0,
                              "test_TcorrTest_prog1 - Name failed!")
            # Check InvBond
            self.assertEquals(directCorrelationData[0][9], "True",
                              "test_TcorrTest_prog1 - InvBond failed!")
            self.assertEquals(directCorrelationData[1][9], "True",
                              "test_TcorrTest_prog1 - InvBond failed!")
            self.assertEquals(directCorrelationData[2][9], "True",
                              "test_TcorrTest_prog1 - InvBond failed!")
            self.assertEquals(directCorrelationData[3][9], "True",
                              "test_TcorrTest_prog1 - InvBond failed!")
            self.assertEquals(directCorrelationData[4][9], "True",
                              "test_TcorrTest_prog1 - InvBond failed!")
            self.assertEquals(directCorrelationData[5][9], "True",
                              "test_TcorrTest_prog1 - InvBond failed!")
            # Check Total_Count
            self.assertEquals(directCorrelationData[0][10], 2,
                              "test_TcorrTest_prog1 - Total_Count failed!")
            self.assertEquals(directCorrelationData[1][10], 2,
                              "test_TcorrTest_prog1 - Total_Count failed!")
            self.assertEquals(directCorrelationData[2][10], 2,
                              "test_TcorrTest_prog1 - Total_Count failed!")
            self.assertEquals(directCorrelationData[3][10], 2,
                              "test_TcorrTest_prog1 - Total_Count failed!")
            self.assertEquals(directCorrelationData[4][10], 2,
                              "test_TcorrTest_prog1 - Total_Count failed!")
            self.assertEquals(directCorrelationData[5][10], 2,
                              "test_TcorrTest_prog1 - Total_Count failed!")
            # Check Weight
            self.assertTrue(
                directCorrelationData[0][8] > directCorrelationData[2][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[0][8] > directCorrelationData[3][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[0][8] > directCorrelationData[4][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[0][8] > directCorrelationData[5][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[1][8] > directCorrelationData[2][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[1][8] > directCorrelationData[3][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[1][8] > directCorrelationData[4][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[1][8] > directCorrelationData[5][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[0][8] == directCorrelationData[1][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[2][8] == directCorrelationData[3][8],
                "test_TcorrTest_prog1 - Weight failed!")
            self.assertTrue(
                directCorrelationData[4][8] == directCorrelationData[5][8],
                "test_TcorrTest_prog1 - Weight failed!")

            # TestHost03
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='AAA.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost03", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='BBB.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost03", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='CCC.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost03", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='DDD.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost03", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='EEE.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost03", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='FFF.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost03", entry_fields)
            entry_fields = settings.EntriesFields(
                EntryType=settings.__APPCOMPAT__,
                FilePath='C:\Temp',
                FileName='GGG.exe',
                Size=1,
                ExecFlag='True')
            add_entry(DB, "TestHost03", entry_fields)

            try:
                directCorrelationData = main(
                    [self.testset1, "tcorr", "DDD.exe", "-w 1"])
            except Exception as e:
                print traceback.format_exc()
                self.fail(e.message + "\n" + traceback.format_exc())

            # Check Names
            self.assertEquals(directCorrelationData[0][3], "CCC.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            self.assertEquals(directCorrelationData[1][3], "EEE.exe",
                              "test_TcorrTest_prog1 - Name failed!")
            # Check Before
            self.assertEquals(directCorrelationData[0][6], 0,
                              "test_TcorrTest_prog1 - Before failed!")
            self.assertEquals(directCorrelationData[1][6], 3,
                              "test_TcorrTest_prog1 - Before failed!")
            # Check After
            self.assertEquals(directCorrelationData[0][7], 3,
                              "test_TcorrTest_prog1 - After failed!")
            self.assertEquals(directCorrelationData[1][7], 0,
                              "test_TcorrTest_prog1 - After failed!")
            # Check InvBond
            self.assertEquals(directCorrelationData[0][9], "True",
                              "test_TcorrTest_prog1 - InvBond failed!")
            self.assertEquals(directCorrelationData[1][9], "True",
                              "test_TcorrTest_prog1 - InvBond failed!")
            # Check Total_Count
            self.assertEquals(directCorrelationData[0][10], 3,
                              "test_TcorrTest_prog1 - Total_Count failed!")
            self.assertEquals(directCorrelationData[1][10], 3,
                              "test_TcorrTest_prog1 - Total_Count failed!")
Example No. 20
class Appcompat_Raw_hive(Ingest):
    ingest_type = "appcompat_raw_hive"
    file_name_filter = "(?:.*)(?:\/|\\\)SYSTEM.*$"

    def __init__(self):
        super(Appcompat_Raw_hive, self).__init__()

    def getHostName(self, file_name_fullpath):
        file_object = loadFile(file_name_fullpath)
        regf_file = pyregf.file()
        regf_file.open_file_object(file_object, "r")
        # Get control set number
        tmp_key = regf_file.get_key_by_path(r'Select')
        if tmp_key is not None:
            controlset_number = tmp_key.get_value_by_name(
                'Current').get_data_as_integer()
            # Get host name
            tmp_key = regf_file.get_key_by_path(
                r'ControlSet00' + str(controlset_number) +
                '\Control\ComputerName\ComputerName')
            host_name = tmp_key.get_value_by_name(
                'ComputerName').get_data_as_string()
        else:
            # todo: Close everything down elegantly
            logger.error(
                "Attempting to process non-SYSTEM hive with appcompat_raw_hive plugin: %s"
                % file_name_fullpath)
            raise Exception(
                'Attempting to process non-SYSTEM hive with appcompat_raw_hive plugin')

        # Need to close these or the memory will never get freed:
        regf_file.close()
        del regf_file
        file_object.close()
        del file_object
        return host_name

    def checkMagic(self, file_name_fullpath):
        magic_ok = False
        # Check magic
        magic_id = self.id_filename(file_name_fullpath)
        if 'registry' in magic_id:
            file_object = loadFile(file_name_fullpath)
            regf_file = pyregf.file()
            regf_file.open_file_object(file_object, "r")
            magic_key = regf_file.get_key_by_path(r'Select')
            regf_file.close()
            del regf_file
            if magic_key is not None:
                magic_ok = True

            # Need to close these or the memory will never get freed:
            file_object.close()
            del file_object

        return magic_ok

    def calculateID(self, file_name_fullpath):
        instanceID = 0
        file_object = loadFile(file_name_fullpath)
        regf_file = pyregf.file()
        regf_file.open_file_object(file_object, "r")

        # Search for key containing ShimCache entries on all control sets
        # Use last modification time of the last modified one as instanceID
        root = regf_file.get_root_key()
        num_keys = root.get_number_of_sub_keys()
        for i in xrange(0, num_keys):
            tmp_key = root.get_sub_key(i)
            if "controlset" in tmp_key.get_name().lower():
                session_man_key = regf_file.get_key_by_path(
                    "%s\Control\Session Manager" % tmp_key.get_name())
                num_keys = session_man_key.get_number_of_sub_keys()
                for i in xrange(0, num_keys):
                    tmp_key = session_man_key.get_sub_key(i)
                    if "appcompatibility" in tmp_key.get_name().lower(
                    ) or "appcompatcache" in tmp_key.get_name().lower():
                        last_write_time = tmp_key.get_last_written_time_as_integer(
                        )
                        if last_write_time > instanceID:
                            instanceID = last_write_time
                        break

        # Need to close these or the memory will never get freed:
        regf_file.close()
        del regf_file
        file_object.close()
        del file_object
        return instanceID

    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        rowNumber = 0
        entries = None
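        # Sentinel timestamps used for fields ShimCacheParser reports as 'N/A'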
        minSQLiteDTS = datetime(1, 1, 1, 0, 0, 0)
        maxSQLiteDTS = datetime(9999, 12, 31, 0, 0, 0)

        # Process file using ShimCacheParser
        try:
            entries = read_from_hive(loadFile(file_fullpath), True)
            if not entries:
                logger.warning("[ShimCacheParser] found no entries for %s" %
                               file_fullpath)
                return False
            else:
                rows = write_it(entries, "StringIO")[1:]
        except IOError, err:
            logger.error("[ShimCacheParser] Error opening binary file: %s" %
                         str(err))
            # Nothing to parse if the hive could not be read
            return False

        # Process records
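        # Each ShimCacheParser row is expected as:
        #   <LastModified|N/A>, <LastUpdate|N/A>, <FilePath>\<FileName>, <Size|N/A>, <True|False|N/A>
        # captured by the regex below as groups 1-6.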
        appCompatREGEX = re.compile(
            "((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ]((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ](.*)\\\([^\\\]*)[, ](N\/A|\d*)[, ](N\/A|True|False)"
        )
        assert (rows is not None)
        for r in rows:
            if b'\x00' in r:
                logger.debug(
                    "NULL byte found, skipping bad shimcache parse: %s" % r)
                continue
            m = appCompatREGEX.match(r)
            if m:
                try:
                    # Convert to timestamps:
                    if m.group(1) != 'N/A':
                        tmp_LastModified = datetime.strptime(
                            m.group(1), "%Y-%m-%d %H:%M:%S")
                    else:
                        tmp_LastModified = minSQLiteDTS
                    if m.group(2) != 'N/A':
                        tmp_LastUpdate = datetime.strptime(
                            m.group(2), "%Y-%m-%d %H:%M:%S")
                    else:
                        tmp_LastUpdate = minSQLiteDTS

                except Exception as e:
                    print("crap")
                    exc_type, exc_obj, exc_tb = sys.exc_info()
                    fname = os.path.split(
                        exc_tb.tb_frame.f_code.co_filename)[1]
                    logger.info(
                        "Exception processing row (%s): %s [%s / %s / %s]" %
                        (e.message, file_fullpath, exc_type, fname,
                         exc_tb.tb_lineno))

                namedrow = settings.EntriesFields(
                    HostID=hostID,
                    EntryType=settings.__APPCOMPAT__,
                    RowNumber=rowNumber,
                    LastModified=tmp_LastModified,
                    LastUpdate=tmp_LastUpdate,
                    FilePath=unicode(m.group(3)),
                    FileName=unicode(m.group(4)),
                    Size=unicode(m.group(5)),
                    ExecFlag=str(m.group(6)),
                    InstanceID=instanceID)
                rowsData.append(namedrow)
                rowNumber += 1
            else:
                logger.warning("Entry regex failed for: %s - %s" % (hostID, r))
Example No. 21
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        # Returns data in rowsData
        minSQLiteDTS = datetime(1, 1, 1, 0, 0, 0)
        maxSQLiteDTS = datetime(9999, 12, 31, 0, 0, 0)
        rowNumber = 0

        check_tags = ['LastModified', 'AppCompatPath']
        try:
            # Process file using ShimCacheParser
            try:
                xml_data = loadFile(file_fullpath)
                (error, entries) = read_mir(xml_data, True)
                xml_data.close()

                assert(not error)
                if not entries:
                    logger.warning("[ShimCacheParser] found no entries for %s" % file_fullpath)
                    return False
                else:
                    rows = write_it(entries, "StringIO")[1:]
            except IOError, err:
                logger.error("[ShimCacheParser] Error opening binary file: %s" % str(err))
                # Nothing to parse if the acquisition could not be read
                return False

            # Process records
            appCompatREGEX = re.compile(
                "((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ]((?:\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d)|N\/A)[, ](.*)\\\([^\\\]*)[, ](N\/A|\d*)[, ](N\/A|True|False)")
            assert (rows is not None)
            for r in rows:
                if b'\x00' in r:
                    logger.debug("NULL byte found, skipping bad shimcache parse: %s" % r)
                    continue
                m = appCompatREGEX.match(r)
                if m:
                    try:
                        # Convert to timestamps:
                        if m.group(1) != 'N/A':
                            tmp_LastModified = datetime.strptime(m.group(1), "%Y-%m-%d %H:%M:%S")
                        else:
                            tmp_LastModified = minSQLiteDTS
                        if m.group(2) != 'N/A':
                            tmp_LastUpdate = datetime.strptime(m.group(2), "%Y-%m-%d %H:%M:%S")
                        else:
                            tmp_LastUpdate = minSQLiteDTS

                    except Exception as e:
                        print("crap")
                        exc_type, exc_obj, exc_tb = sys.exc_info()
                        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
                        logger.info("Exception processing row (%s): %s [%s / %s / %s]" % (
                            e.message, file_fullpath, exc_type, fname, exc_tb.tb_lineno))

                    namedrow = settings.EntriesFields(HostID=hostID, EntryType=settings.__APPCOMPAT__,
                                                      RowNumber=rowNumber,
                                                      LastModified=tmp_LastModified,
                                                      LastUpdate=tmp_LastUpdate,
                                                      FilePath=unicode(m.group(3)),
                                                      FileName=unicode(m.group(4)),
                                                      Size=unicode(m.group(5)),
                                                      ExecFlag=str(m.group(6)),
                                                      InstanceID=instanceID)
                    rowsData.append(namedrow)
                    rowNumber += 1
                else:
                    logger.warning("Entry regex failed for: %s - %s" % (hostID, r))
Example No. 22
    def processFile(self, file_fullpath, hostID, instanceID, rowsData):
        minSQLiteDTS = datetime(1, 1, 1, 0, 0, 0)
        maxSQLiteDTS = datetime(9999, 12, 31, 0, 0, 0)
        # alltags = set()
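        # Each tuple maps (source XML tag, destination DB field or 'N/A' to ignore,
        # value type used for conversion in the loop further down).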
        tag_mapping = [
            ('AmCacheCompanyName', 'N/A', 'string'),
            ('AmCacheCompileTime', 'N/A', 'datetime'),
            ('AmCacheCreated', 'N/A', 'datetime'),
            ('AmCacheFileDescription', 'N/A', 'string'),
            ('AmCacheFilePath', 'N/A', 'string'),
            ('AmCacheFileSize', 'N/A', 'string'),
            ('AmCacheFileVersionNumber', 'N/A', 'string'),
            ('AmCacheFileVersionString', 'N/A', 'string'),
            ('AmCacheLanguageCode', 'N/A', 'string'),
            ('AmCacheLastModified', 'N/A', 'datetime'),
            ('AmCacheRegLastModified', 'LastModified', 'datetime'),
            ('AmCachePEHeaderChecksum', 'N/A', 'string'),
            ('AmCachePEHeaderHash', 'N/A', 'string'),
            ('AmCachePEHeaderSize', 'N/A', 'string'),
            ('AmCacheProductName', 'N/A', 'string'),
            ('AmCacheProgramID', 'N/A', 'string'),
            ('AmCacheSha1', 'SHA1', 'string'),
            ('FileItem_Accessed', 'N/A', 'datetime'),
            ('FileItem_Changed', 'N/A', 'datetime'),
            ('FileItem_Created', 'N/A', 'datetime'),
            ('FileItem_DevicePath', 'N/A', 'string'),
            ('FileItem_Drive', 'N/A', 'string'),
            ('FileItem_FileAttributes', 'N/A', 'string'),
            ('FileItem_FileExtension', 'N/A', 'string'),
            ('FileItem_FileName', 'N/A', 'string'),
            ('FileItem_FilePath', 'N/A', 'string'),
            ('FileItem_FullPath', 'N/A', 'string'),
            ('FileItem_Md5sum', 'N/A', 'string'),
            ('FileItem_Modified', 'N/A', 'datetime'),
            ('FileItem_PEInfo_BaseAddress', 'N/A', 'string'),
            ('FileItem_PEInfo_DigitalSignature_Description', 'N/A', 'string'),
            ('FileItem_PEInfo_ExtraneousBytes', 'N/A', 'string'),
            ('FileItem_PEInfo_PEChecksum_PEComputedAPI', 'N/A', 'string'),
            ('FileItem_PEInfo_PEChecksum_PEFileAPI', 'N/A', 'string'),
            ('FileItem_PEInfo_PEChecksum_PEFileRaw', 'N/A', 'string'),
            ('FileItem_PEInfo_PETimeStamp', 'N/A', 'datetime'),
            ('FileItem_PEInfo_Subsystem', 'N/A', 'string'),
            ('FileItem_PEInfo_Type', 'N/A', 'string'),
            ('FileItem_SecurityID', 'N/A', 'string'),
            ('FileItem_SecurityType', 'N/A', 'string'),
            ('FileItem_SizeInBytes', 'N/A', 'string'),
            ('FileItem_Username', 'N/A', 'string'),
            ('ProgramEntryPresent', 'N/A', 'string'),
            ('ProgramInstallDate', 'N/A', 'datetime'),
            ('ProgramInstallSource', 'N/A', 'string'),
            ('ProgramLocaleID', 'N/A', 'string'),
            ('ProgramName', 'N/A', 'string'),
            ('ProgramPackageCode', 'N/A', 'string'),
            ('ProgramPackageCode2', 'N/A', 'string'),
            ('ProgramUninstallKey', 'N/A', 'string'),
            ('ProgramUnknownTimestamp', 'N/A', 'datetime'),
            ('ProgramVendorName', 'N/A', 'string'),
            ('ProgramVersion', 'N/A', 'string')
        ]

        rowNumber = 0
        check_tags = ['AmCacheRegLastModified']
        try:
            xml_data = loadFile(file_fullpath)
            for event, element in etree.iterparse(xml_data, events=("end", )):
                skip_entry = False
                tag_dict = {}
                if element.tag == "AmCacheItem":
                    self._processElement(element, tag_dict)

                    # Check we have everything we need and ignore entries with critical XML errors on them
                    for tag in check_tags:
                        if tag not in tag_dict:
                            if 'AmCacheFilePath' in tag_dict:
                                logger.warning(
                                    "Missing tag [%s] in %s, entry: %s (skipping entry)"
                                    % (tag, tag_dict['AmCacheFilePath'],
                                       file_fullpath))
                            else:
                                logger.warning(
                                    "Missing tag [%s] in %s, entry: Unknown (skipping entry)"
                                    % (tag, file_fullpath))
                            skip_entry = True
                            break
                        if tag_dict[tag] is None:
                            if 'AmCacheFilePath' in tag_dict:
                                logger.warning(
                                    "Malformed tag [%s: %s] in %s, entry: %s (skipping entry)"
                                    % (tag, tag_dict[tag],
                                       tag_dict['AmCacheFilePath'],
                                       file_fullpath))
                            else:
                                logger.warning(
                                    "Malformed tag [%s: %s] in %s, entry: Unknown (skipping entry)"
                                    % (tag, tag_dict[tag], file_fullpath))
                            skip_entry = True
                            break

                    # Some entries in AmCache do not refer to files per se (like installed program entries)
                    # We don't have much use for them right now but let's keep the data there until I figure what to do with them
                    if 'AmCacheFilePath' not in tag_dict:
                        if 'ProgramName' in tag_dict:
                            tag_dict['AmCacheFilePath'] = tag_dict[
                                'ProgramName']
                        else:
                            # If we have nothing we can use here we skip the entry for now
                            # todo: pretty-print the tag_dict to the log file
                            logger.warning(
                                "AmCache entry with no AppCompatPath or ProgramName. (skipping entry) %s"
                                % file_fullpath)
                            skip_entry = True

                    # If the entry is valid do some housekeeping:
                    if not skip_entry:
                        if 'ExecutionFlag' in tag_dict:
                            if tag_dict['ExecutionFlag'] == '1':
                                tmpExecFlag = True
                            elif tag_dict['ExecutionFlag'] == '0':
                                tmpExecFlag = False
                            else:
                                tmpExecFlag = tag_dict['ExecutionFlag']
                        else:
                            # todo: Not all OS's have exec flag. Need to change the schema to reflect those cases!
                            tmpExecFlag = False

                        try:
                            # # Convert TS to datetime format
                            # if 'LastModified' in tag_dict:
                            #     tmp_LastModified = tag_dict['LastModified'].replace("T", " ").replace("Z", "")
                            #     if type(tmp_LastModified) is not datetime:
                            #         tmp_LastModified = datetime.strptime(tmp_LastModified, "%Y-%m-%d %H:%M:%S")
                            # else: tmp_LastModified = minSQLiteDTS
                            #
                            # if 'LastUpdate' in tag_dict:
                            #     tmp_LastUpdate = tag_dict['LastUpdate'].replace("T", " ").replace("Z", "")
                            #     if type(tmp_LastUpdate) is not datetime:
                            #         tmp_LastUpdate = datetime.strptime(tmp_LastUpdate, "%Y-%m-%d %H:%M:%S")
                            # else: tmp_LastUpdate = minSQLiteDTS

                            row_dict = {}
                            row_dict['HostID'] = hostID
                            row_dict['EntryType'] = settings.__APPCOMPAT__
                            row_dict['RowNumber'] = rowNumber
                            row_dict['InstanceID'] = instanceID
                            # row_dict['LastModified'] = tmp_LastModified
                            # row_dict['LastUpdate'] = tmp_LastUpdate
                            row_dict['FileName'] = ntpath.basename(
                                tag_dict['AmCacheFilePath'])
                            row_dict['FilePath'] = ntpath.dirname(
                                tag_dict['AmCacheFilePath'])
                            row_dict['Size'] = (tag_dict['Size'] if 'Size'
                                                in tag_dict else 'N/A')
                            row_dict['ExecFlag'] = tmpExecFlag

                            # for tag in tag_dict.keys():
                            #     alltags.add(tag)

                            # Add all tags available with mappings to our database schema
                            for src_tag, dest_tag, dest_type in tag_mapping:
                                if dest_tag != 'N/A' and src_tag in tag_dict:
                                    if dest_type == 'datetime':
                                        tmp_timestamp = tag_dict[src_tag].replace("T", " ").replace("Z", "")
                                        if type(tmp_timestamp) is not datetime:
                                            tmp_timestamp = datetime.strptime(tmp_timestamp, "%Y-%m-%d %H:%M:%S")
                                        row_dict[dest_tag] = tmp_timestamp
                                    else:
                                        row_dict[dest_tag] = tag_dict[src_tag]

                            namedrow = settings.EntriesFields(**row_dict)
                            rowsData.append(namedrow)
                            rowNumber += 1
                        except Exception as e:
                            print("crap")
                            exc_type, exc_obj, exc_tb = sys.exc_info()
                            fname = os.path.split(
                                exc_tb.tb_frame.f_code.co_filename)[1]
                            logger.info(
                                "Exception processing row (%s): %s [%s / %s / %s]"
                                % (e.message, file_fullpath, exc_type, fname,
                                   exc_tb.tb_lineno))
                    # Free the processed AmCacheItem element so iterparse doesn't keep the whole tree in memory
                    element.clear()
            xml_data.close()
        except Exception as e:
            print e.message
            print traceback.format_exc()
            pass
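The AmCache variant normalizes ExecutionFlag inline ('1'/'0' mapped to booleans, other values passed through, missing flags defaulted to False). A hedged sketch of that logic as a standalone helper (hypothetical, not part of the plugin):

def normalize_exec_flag(tag_dict):
    # Mirrors the inline handling above: '1' -> True, '0' -> False,
    # other values passed through, missing flag defaults to False
    # (not every OS exposes an execution flag).
    if 'ExecutionFlag' not in tag_dict:
        return False
    flag = tag_dict['ExecutionFlag']
    if flag == '1':
        return True
    if flag == '0':
        return False
    return flag

# Usage sketch:
# row_dict['ExecFlag'] = normalize_exec_flag(tag_dict)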