Code example #1
0
    def process_mft(self, par_id, configuration, table_list, knowledge_base):
        """Parse the $MFT file and bulk-insert all file-record rows.

        Full paths built for each record are cached in self.path_dict for
        later reuse.  Returns the MasterFileTableParser instance so callers
        can keep reading from it (the file object is intentionally left
        open for that reason).
        """
        mft_file = MFT.MasterFileTableParser(open(self._mft_path, 'rb'))

        info = [par_id, configuration.case_id, configuration.evidence_id]

        rows = []
        for file_record in mft_file.file_records():
            try:
                file_paths = mft_file.build_full_paths(file_record, True)
                # Cache just the path strings (each entry is (path, $FILE_NAME)).
                self.path_dict[file_record] = [entry[0] for entry in file_paths]
            except MFT.MasterFileTableException:
                continue

            # TODO: fix duplicated file_path handling
            selected_paths = [file_paths[0]] if file_paths else file_paths
            rows.extend(mft_parser.mft_parse(info, mft_file, file_record,
                                             selected_paths,
                                             knowledge_base.time_zone))

        query = f"Insert into {table_list[0]} values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, " \
                f"%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"

        configuration.cursor.bulk_execute(query, rows)

        return mft_file
Code example #2
0
File: usnjrnl_parser.py  Project: Kimwonkyung/carpe
def usnjrnl_parse(mft_file, usn_record):
    """Convert one USN change-journal record into a flat result tuple.

    The record's file path is resolved through the $MFT when possible;
    fields that cannot be resolved are returned as empty strings.
    """
    r_usn = usn_record.get_usn()
    r_source = USN.ResolveSourceCodes(usn_record.get_source_info())
    r_reason = USN.ResolveReasonCodes(usn_record.get_reason())
    fr_reference_number = usn_record.get_file_reference_number()
    parent_fr_reference_number = usn_record.get_parent_file_reference_number()

    # Only V2/V3 records carry a timestamp and a file name.
    if type(usn_record) is USN.USN_RECORD_V2_OR_V3:
        r_timestamp = util.format_timestamp(usn_record.get_timestamp())
        fr_file_name = usn_record.get_file_name()
    else:
        r_timestamp = ''
        fr_file_name = ''

    fr_number, fr_sequence = MFT.DecodeFileRecordSegmentReference(fr_reference_number)

    fr_file_path = ''
    try:
        file_record = mft_file.get_file_record_by_number(fr_number, fr_sequence)
        file_paths = mft_file.build_full_paths(file_record)
    except MFT.MasterFileTableException:
        pass
    else:
        if file_paths:
            fr_file_path = file_paths[0]

    return (r_usn, r_source, r_reason, fr_reference_number, parent_fr_reference_number, r_timestamp,
            fr_file_name, fr_file_path)
Code example #3
0
File: ntfs_connector.py  Project: Kimwonkyung/carpe
    def process_mft(self, par_id, configuration, table_list):
        """Parse every $MFT file record and bulk-insert the resulting rows."""
        # NOTE(review): the file object stays open because the parser keeps
        # reading from it during iteration.
        mft_file = MFT.MasterFileTableParser(open(self._mft_path, 'rb'))

        info = (par_id, configuration.case_id, configuration.evidence_id)

        query = f"Insert into {table_list[0]} values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, " \
                f"%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"

        mft_list = []
        for _, file_record in tqdm(enumerate(mft_file.file_records())):
            try:
                file_paths = mft_file.build_full_paths(file_record, True)
            except MFT.MasterFileTableException:
                continue

            # TODO: fix duplicated file_path handling
            selected_paths = [file_paths[0]] if file_paths else file_paths
            mft_list.extend(
                mft_parser.mft_parse(info, mft_file, file_record,
                                     selected_paths))

        print(f'mft num: {len(mft_list)}')
        configuration.cursor.bulk_execute(query, mft_list)
Code example #4
0
File: ntfs_connector.py  Project: Kimwonkyung/carpe
    def process_logfile(self, par_id, configuration, table_list):
        """Parse $LogFile restart areas and log records and bulk-insert them.

        Restart-area rows go into table_list[1], log-record rows into
        table_list[2].  The $MFT is parsed alongside so log records can be
        resolved back to file records.
        """
        logfile_object = open(self._logfile_path, 'rb')
        mft_object = open(self._mft_path, 'rb')

        log_file = LogFile.LogFileParser(logfile_object)
        mft_file = MFT.MasterFileTableParser(mft_object)

        restart_area_list = []
        log_record_list = []
        info = (par_id, configuration.case_id, configuration.evidence_id)

        for idx, log_item in tqdm(enumerate(log_file.parse_ntfs_records())):
            # BUG FIX: the original guard was `if not type(log_item):`, which
            # is always False (a class object is truthy), so it never skipped
            # anything.  Skip empty items explicitly; items of any other
            # unexpected type still fall through the elif chain and are
            # ignored, as before.
            if log_item is None:
                continue
            elif type(log_item) is LogFile.NTFSRestartArea:
                output_data = logfile_parser.restart_area_parse(log_item)
                restart_area_list.append(info + tuple(output_data))
            elif type(log_item) is LogFile.NTFSLogRecord:
                output_data = logfile_parser.log_record_parse(
                    log_item, mft_file)
                log_record_list.append(info + tuple(output_data))

        restart_area_query = f"Insert into {table_list[1]} values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
        log_record_query = f"Insert into {table_list[2]} values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"

        print(f'restart area num: {len(restart_area_list)}')
        print(f'log record num: {len(log_record_list)}')
        configuration.cursor.bulk_execute(restart_area_query,
                                          restart_area_list)
        configuration.cursor.bulk_execute(log_record_query, log_record_list)
Code example #5
0
File: ntfs_connector.py  Project: Kimwonkyung/carpe
    def process_usnjrnl(self, par_id, configuration, table_list):
        """Parse the $UsnJrnl change journal and bulk-insert its records."""
        usn_journal = USN.ChangeJournalParser(open(self._usnjrnl_path, 'rb'))
        mft_file = MFT.MasterFileTableParser(open(self._mft_path, 'rb'))

        query = f"Insert into {table_list[3]} values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"

        row_prefix = (par_id, configuration.case_id,
                      configuration.evidence_id)
        usnjrnl_list = [
            row_prefix + usnjrnl_parser.usnjrnl_parse(mft_file, usn_record)
            for _, usn_record in tqdm(enumerate(usn_journal.usn_records()))
        ]

        print(f'usnjrnl num: {len(usnjrnl_list)}')
        configuration.cursor.bulk_execute(query, usnjrnl_list)
Code example #6
0
File: logfile_parser.py  Project: naaya17/carpe
def log_record_parse(log_record, mft_file, path_dict, time_zone):
    """Flatten one $LogFile log record into a list of output fields.

    The record's target is resolved back to an MFT file record (and full
    path) when possible, using *path_dict* as a cache of already-built
    paths.  Attribute payloads embedded in the redo/undo data are decoded
    into a dict that is appended as a JSON string.  Returns None when the
    record's LCN list cannot be read.
    """
    target_attribute_name = None  # compared against '$J' (USN journal) below
    redo_op = log_record.get_redo_operation()
    undo_op = log_record.get_undo_operation()
    redo_data = log_record.get_redo_data()
    undo_data = log_record.get_undo_data()

    log_record_items = [log_record.lsn, log_record.transaction_id,
                        LogFile.ResolveNTFSOperation(redo_op), LogFile.ResolveNTFSOperation(undo_op)]

    # The target is either a plain MFT file number or, failing that,
    # a (file reference, attribute name) pair.
    target = log_record.calculate_mft_target_number()
    if target is not None:
        log_record_items.append(target)

        try:
            file_record = mft_file.get_file_record_by_number(target)
            # Reuse a cached path list when this record was seen before.
            if file_record in path_dict.keys():
                file_paths = path_dict[file_record]
            else:
                file_paths = mft_file.build_full_paths(file_record)
        except MFT.MasterFileTableException:
            fr_file_path = None
        else:
            if len(file_paths) > 0:
                fr_file_path = file_paths[0]
            else:
                fr_file_path = None
        log_record_items.append(None)  # target_reference
        log_record_items.append(None)  # target_attribute_name
        log_record_items.append(fr_file_path)
    else:
        log_record_items.append(None)  # target_file_number
        target = log_record.calculate_mft_target_reference_and_name()
        if target is not None:
            target_reference, target_attribute_name = target

            log_record_items.append(target_reference)

            if target_attribute_name is None:
                target_attribute_name = '-'

            log_record_items.append(target_attribute_name)

            fr_number, fr_sequence = MFT.DecodeFileRecordSegmentReference(target_reference)

            try:
                file_record = mft_file.get_file_record_by_number(fr_number, fr_sequence)
                if file_record in path_dict.keys():
                    file_paths = path_dict[file_record]
                else:
                    file_paths = mft_file.build_full_paths(file_record)
            except MFT.MasterFileTableException:
                fr_file_path = None
            else:
                if len(file_paths) > 0:
                    fr_file_path = file_paths[0]
                else:
                    fr_file_path = None
            log_record_items.append(fr_file_path)
        else:
            log_record_items.append(None)  # target_reference
            log_record_items.append(None)  # target_attribute_name
            log_record_items.append(None)  # file_path

    offset_in_target = log_record.calculate_offset_in_target()
    if offset_in_target is not None:
        log_record_items.append(offset_in_target)
    else:
        log_record_items.append('Unknown')

    # Collect the logical cluster numbers backing the target page; an
    # unreadable LCN list aborts parsing of this record entirely.
    lcns = []
    try:
        for lcn in log_record.get_lcns_for_page():
            lcns.append(str(lcn))
    except LogFile.ClientException:
        return None

    if len(lcns) > 0:
        log_record_items.append(' '.join(lcns))
    else:
        log_record_items.append(None)  # lcns

    log_record_items.append(redo_data)
    log_record_items.append(undo_data)

    # Decoded attribute payloads; serialized to JSON at the end.
    attr_items = {}

    # Case 1: a whole file record segment is being initialized.  Pad the
    # redo data to the FRS size and parse the resident attributes in it.
    if redo_op == LogFile.InitializeFileRecordSegment:
        # Target block size is in 512-byte units; 0 means unknown, so
        # assume the common 1024-byte FRS.
        frs_size = log_record.get_target_block_size() * 512
        if frs_size == 0:
            frs_size = 1024

        frs_buf = redo_data + (b'\x00' * (frs_size - len(redo_data)))

        try:
            frs = MFT.FileRecordSegment(frs_buf, False)
        except MFT.MasterFileTableException:
            pass
        else:
            try:
                for frs_attr in frs.attributes():
                    # Only resident attributes carry their value inline.
                    if type(frs_attr) is MFT.AttributeRecordNonresident:
                        continue

                    frs_attr_val = frs_attr.value_decoded()
                    if type(frs_attr_val) is Attributes.StandardInformation:
                        std_info = {
                            'attr_val': '$STANDARD_INFORMATION',
                            'm_time': util.format_timestamp(frs_attr_val.get_mtime(), time_zone),
                            'a_time': util.format_timestamp(frs_attr_val.get_atime(), time_zone),
                            'c_time': util.format_timestamp(frs_attr_val.get_ctime(), time_zone),
                            'e_time': util.format_timestamp(frs_attr_val.get_etime(), time_zone),
                            'file_attributes': Attributes.ResolveFileAttributes(
                                frs_attr_val.get_file_attributes())
                        }
                        attr_items['std_info'] = std_info

                    elif type(frs_attr_val) is Attributes.FileName:
                        file_name = {
                            'attr_val': '$FILE_NAME',
                            'm_time': util.format_timestamp(frs_attr_val.get_mtime(), time_zone),
                            'a_time': util.format_timestamp(frs_attr_val.get_atime(), time_zone),
                            'c_time': util.format_timestamp(frs_attr_val.get_ctime(), time_zone),
                            'e_time': util.format_timestamp(frs_attr_val.get_etime(), time_zone),
                            'file_name': frs_attr_val.get_file_name()
                        }

                        parent_reference = frs_attr_val.get_parent_directory()
                        file_name['parent_reference'] = parent_reference

                        fr_number, fr_sequence = MFT.DecodeFileRecordSegmentReference(parent_reference)

                        try:
                            file_record = mft_file.get_file_record_by_number(fr_number, fr_sequence)
                            if file_record in path_dict.keys():
                                file_paths = path_dict[file_record]
                            else:
                                file_paths = mft_file.build_full_paths(file_record)
                        except MFT.MasterFileTableException:
                            fr_file_path = None
                        else:
                            if len(file_paths) > 0:
                                fr_file_path = file_paths[0]
                            else:
                                fr_file_path = None
                        file_name['parent_file_path'] = fr_file_path
                        # An FRS can carry two $FILE_NAME attributes;
                        # keep both under distinct keys.
                        if 'file_name' in attr_items:
                            attr_items['second_file_name'] = file_name
                        else:
                            attr_items['file_name'] = file_name

                    elif type(frs_attr_val) is Attributes.ObjectID:
                        object_id = {
                            'attr_val': '$OBJECT_ID',
                            'guid': str(frs_attr_val.get_object_id()),
                            'timestamp': util.format_timestamp(frs_attr_val.get_timestamp(), time_zone)
                        }
                        attr_items['object_id'] = object_id

            except MFT.MasterFileTableException:
                pass

    # Case 2: an attribute record is being created (or the end-of-FRS
    # marker written).  Pick whichever buffer holds the attribute record
    # and decode its resident value.
    if redo_op == LogFile.CreateAttribute or undo_op == LogFile.CreateAttribute or \
            redo_op == LogFile.WriteEndOfFileRecordSegment or undo_op == LogFile.WriteEndOfFileRecordSegment:
        if redo_op == LogFile.CreateAttribute:
            attr_buf = redo_data
        elif undo_op == LogFile.CreateAttribute:
            attr_buf = undo_data
        else:
            # WriteEndOfFileRecordSegment: take the larger payload.
            if len(redo_data) > len(undo_data):
                attr_buf = redo_data
            else:
                attr_buf = undo_data

        # 24 bytes = 16-byte partial header + 8-byte resident header.
        if len(attr_buf) >= 24:
            type_code, record_length, form_code, name_length, name_offset, flags, instance = \
                MFT.UnpackAttributeRecordPartialHeader(attr_buf[0: 16])
            value_length, value_offset, resident_flags, reserved = \
                MFT.UnpackAttributeRecordRemainingHeaderResident(attr_buf[16: 24])

            # Sanity-check the header before slicing out the value
            # (value offsets are 8-byte aligned).
            if value_offset > 0 and value_offset % 8 == 0 and value_length > 0:
                attr_value_buf = attr_buf[value_offset: value_offset + value_length]
                if len(attr_value_buf) == value_length:
                    if type_code == Attributes.ATTR_TYPE_STANDARD_INFORMATION:
                        attr_si = Attributes.StandardInformation(attr_value_buf)
                        std_info = {
                            'attr_val': '$STANDARD_INFORMATION',
                            'm_time': util.format_timestamp(attr_si.get_mtime(), time_zone),
                            'a_time': util.format_timestamp(attr_si.get_atime(), time_zone),
                            'c_time': util.format_timestamp(attr_si.get_ctime(), time_zone),
                            'e_time': util.format_timestamp(attr_si.get_etime(), time_zone),
                            'file_attributes': Attributes.ResolveFileAttributes(
                                attr_si.get_file_attributes())
                        }
                        attr_items['std_info'] = std_info

                    elif type_code == Attributes.ATTR_TYPE_FILE_NAME:
                        attr_fn = Attributes.FileName(attr_value_buf)

                        file_name = {
                            'attr_val': '$FILE_NAME',
                            'm_time': util.format_timestamp(attr_fn.get_mtime(), time_zone),
                            'a_time': util.format_timestamp(attr_fn.get_atime(), time_zone),
                            'c_time': util.format_timestamp(attr_fn.get_ctime(), time_zone),
                            'e_time': util.format_timestamp(attr_fn.get_etime(), time_zone),
                            'file_name': attr_fn.get_file_name()
                        }

                        parent_reference = attr_fn.get_parent_directory()
                        file_name['parent_reference'] = parent_reference

                        fr_number, fr_sequence = MFT.DecodeFileRecordSegmentReference(parent_reference)

                        try:
                            file_record = mft_file.get_file_record_by_number(fr_number, fr_sequence)
                            if file_record in path_dict.keys():
                                file_paths = path_dict[file_record]
                            else:
                                file_paths = mft_file.build_full_paths(file_record)
                        except MFT.MasterFileTableException:
                            fr_file_path = None
                        else:
                            if len(file_paths) > 0:
                                fr_file_path = file_paths[0]
                            else:
                                fr_file_path = None
                        file_name['parent_file_path'] = fr_file_path
                        attr_items['file_name'] = file_name

                    elif type_code == Attributes.ATTR_TYPE_OBJECT_ID:
                        attr_objid = Attributes.ObjectID(attr_value_buf)

                        object_id = {
                            'attr_val': '$OBJECT_ID',
                            'guid': str(attr_objid.get_object_id()),
                            'timestamp': util.format_timestamp(attr_objid.get_timestamp(), time_zone)
                        }
                        attr_items['object_id'] = object_id

    # Case 3: an index entry is being added (or an end-of-index-buffer
    # marker written).  The entry may embed a $FILE_NAME attribute.
    if redo_op == LogFile.AddIndexEntryRoot or redo_op == LogFile.AddIndexEntryAllocation or \
            redo_op == LogFile.WriteEndOfIndexBuffer or undo_op == LogFile.AddIndexEntryRoot or \
            undo_op == LogFile.AddIndexEntryAllocation or undo_op == LogFile.WriteEndOfIndexBuffer:
        if redo_op == LogFile.AddIndexEntryRoot or redo_op == LogFile.AddIndexEntryAllocation or \
                redo_op == LogFile.WriteEndOfIndexBuffer:
            index_entry = Attributes.IndexEntry(redo_data)
        else:
            index_entry = Attributes.IndexEntry(undo_data)

        attr_value_buf = index_entry.get_attribute()
        # 66 bytes is the minimum size for a $FILE_NAME value —
        # presumably the fixed part plus at least one name character;
        # TODO confirm against the format spec.
        if attr_value_buf is not None and len(attr_value_buf) > 66:
            attr_fn = Attributes.FileName(attr_value_buf)

            file_name = {
                'attr_val': '$FILE_NAME in index'
            }
            # Timestamps may be garbage in carved entries; tolerate
            # decode failures for the individual fields.
            try:
                file_name['mtime'] = util.format_timestamp(attr_fn.get_mtime(), time_zone)
                file_name['atime'] = util.format_timestamp(attr_fn.get_atime(), time_zone)
                file_name['ctime'] = util.format_timestamp(attr_fn.get_ctime(), time_zone)
                file_name['etime'] = util.format_timestamp(attr_fn.get_etime(), time_zone)
                file_name['file_name'] = attr_fn.get_file_name()
            except (ValueError, OverflowError):
                pass

            parent_reference = attr_fn.get_parent_directory()
            file_name['parent_reference'] = parent_reference

            fr_number, fr_sequence = MFT.DecodeFileRecordSegmentReference(parent_reference)

            try:
                file_record = mft_file.get_file_record_by_number(fr_number, fr_sequence)
                if file_record in path_dict.keys():
                    file_paths = path_dict[file_record]
                else:
                    file_paths = mft_file.build_full_paths(file_record)
            except MFT.MasterFileTableException:
                fr_file_path = None
            else:
                if len(file_paths) > 0:
                    fr_file_path = file_paths[0]
                else:
                    fr_file_path = None

            file_name['parent_file_path'] = fr_file_path
            attr_items['file_name_index'] = file_name

    # Case 4 (heuristic): a resident-value update whose offset falls
    # where $STANDARD_INFORMATION timestamps would sit in a 1024- or
    # 4096-byte FRS.  Decode both the redo and undo sides as possible
    # timestamp updates.
    if offset_in_target is not None and (redo_op == LogFile.UpdateResidentValue
                                         or undo_op == LogFile.UpdateResidentValue):
        frs_size = log_record.get_target_block_size() * 512
        if frs_size == 0:
            frs_size = 1024

        if frs_size == 1024 or frs_size == 4096:
            # Presumably FRS header (56 or 72 bytes) plus the 24-byte
            # resident attribute header — TODO confirm.
            si_attr_offset = 56 + 24
            if frs_size == 4096:
                si_attr_offset = 72 + 24

            # 32 bytes covers the four 8-byte timestamps.
            if si_attr_offset <= offset_in_target <= si_attr_offset + 32:
                buf = redo_data
                if len(buf) >= 8:
                    attr_si = Attributes.StandardInformationPartial(buf, offset_in_target - si_attr_offset)

                    possible_std_info_redo = {
                        'attr_val': 'Possible update to $STANDARD_INFORMATION (redo data)',
                        'mtime': util.format_timestamp(attr_si.get_mtime(), time_zone),
                        'atime': util.format_timestamp(attr_si.get_atime(), time_zone),
                        'ctime': util.format_timestamp(attr_si.get_ctime(), time_zone),
                        'etime': util.format_timestamp(attr_si.get_etime(), time_zone)
                    }
                    attr_items['possible_std_info_redo'] = possible_std_info_redo

                buf = undo_data
                if len(buf) >= 8:
                    attr_si = Attributes.StandardInformationPartial(buf, offset_in_target - si_attr_offset)

                    possible_std_info_undo = {
                        'attr_val': 'Possible update to $STANDARD_INFORMATION (undo data)',
                        'mtime': util.format_timestamp(attr_si.get_mtime(), time_zone),
                        'atime': util.format_timestamp(attr_si.get_atime(), time_zone),
                        'ctime': util.format_timestamp(attr_si.get_ctime(), time_zone),
                        'etime': util.format_timestamp(attr_si.get_etime(), time_zone)
                    }
                    attr_items['possible_std_info_undo'] = possible_std_info_undo

    # Case 5: the target is the '$J' data stream (the USN change
    # journal); the nonresident value data is itself a USN record.
    if target_attribute_name == '$J':
        usn_data_1 = None
        usn_data_2 = None

        if redo_op == LogFile.UpdateNonresidentValue:
            usn_data_1 = redo_data
        if undo_op == LogFile.UpdateNonresidentValue:
            usn_data_2 = undo_data

        usn = {}
        for usn_data in [usn_data_1, usn_data_2]:
            if usn_data is not None:
                try:
                    usn_record = USN.GetUsnRecord(usn_data)
                except (NotImplementedError, ValueError):
                    pass
                else:
                    if type(usn_record) is USN.USN_RECORD_V4:
                        usn['version'] = 'version 4'
                    else:
                        usn['version'] = 'version 2 or 3'

                    usn['usn'] = usn_record.get_usn()
                    usn['source_info'] = USN.ResolveSourceCodes(usn_record.get_source_info())
                    usn['reason'] = USN.ResolveReasonCodes(usn_record.get_reason())
                    usn['file_ref_num'] = usn_record.get_file_reference_number()
                    usn['parent_file_reference_number'] = usn_record.get_parent_file_reference_number()

                    # Timestamp/file name exist only on V2/V3 records.
                    if type(usn_record) is USN.USN_RECORD_V2_OR_V3:
                        usn['timestamp'] = util.format_timestamp(usn_record.get_timestamp(), time_zone)
                        usn['file_name'] = usn_record.get_file_name()

                    attr_items['usn'] = usn

    log_record_items.append(str(json.dumps(attr_items)))

    return log_record_items
Code example #7
0
File: mft_parser.py  Project: naaya17/carpe
def mft_parse(info, mft_file, file_record, file_paths, time_zone):
    """Expand one $MFT file record into output rows.

    Emits a 'File record' row per (path, $FILE_NAME) pair in *file_paths*
    (or one pathless row when the list is empty), then 'Index record'
    rows for entries of a resident file-name index, and 'Slack' rows for
    $FILE_NAME attributes carved from the record's slack space.  Each row
    is *info* concatenated with a list of fields, so *info* must itself
    be a list.
    """
    mft_list = []

    attr_standard_information = None  # first $STANDARD_INFORMATION seen
    file_size = None  # size of the unnamed $DATA attribute (main stream)
    ads_set = set()  # names of alternate data streams
    objid_time = None  # $OBJECT_ID timestamp

    # WSL (Linux) timestamps carried in the LXATTRB extended attribute.
    wsl_found = False
    wsl_mtime = ''
    wsl_atime = ''
    wsl_chtime = ''

    # First pass: pull timestamps, size, and ADS names from the
    # record's attributes.
    for attribute in file_record.attributes():
        if type(attribute) is MFT.AttributeRecordResident:
            attribute_value = attribute.value_decoded()

            if type(attribute_value) is Attributes.StandardInformation:
                if attr_standard_information is None:
                    attr_standard_information = attribute_value

            if type(attribute_value) is Attributes.ObjectID:
                if objid_time is None:
                    objid_time = attribute_value.get_timestamp()

            if type(attribute_value) is Attributes.EA:
                if not wsl_found:
                    for ea_name, ea_flags, ea_value in attribute_value.data_parsed():
                        if ea_name == b'LXATTRB\x00':
                            try:
                                lxattrb = WSL.LXATTRB(ea_value)
                            except ValueError:
                                pass
                            else:
                                wsl_found = True
                                wsl_atime = util.format_timestamp(lxattrb.get_atime(), time_zone)
                                wsl_mtime = util.format_timestamp(lxattrb.get_mtime(), time_zone)
                                wsl_chtime = util.format_timestamp(lxattrb.get_chtime(), time_zone)

            # Unnamed $DATA attribute: the file's main stream.
            if attribute.type_code == Attributes.ATTR_TYPE_DATA and attribute.name is None:
                if file_size is None:
                    file_size = str(len(attribute.value))

            if attribute.type_code == Attributes.ATTR_TYPE_DATA and attribute.name is not None:
                ads_set.add(attribute.name)
        else:
            # Nonresident $DATA: only the first extent (lowest_vcn == 0)
            # carries the file size.
            if attribute.type_code == Attributes.ATTR_TYPE_DATA and attribute.name is None and attribute.lowest_vcn == 0:
                if file_size is None:
                    file_size = str(attribute.file_size)

            if attribute.type_code == Attributes.ATTR_TYPE_DATA and attribute.name is not None:
                ads_set.add(attribute.name)

    if file_size is None:
        file_size = '?'

    if len(ads_set) > 0:
        ads_list = ' '.join(sorted(ads_set))
    else:
        ads_list = ''

    if objid_time is None:
        objid_time = ''
    else:
        objid_time = util.format_timestamp(objid_time, time_zone)

    # $STANDARD_INFORMATION timestamps and USN (blank when absent).
    if attr_standard_information is not None:
        si_mtime = util.format_timestamp(attr_standard_information.get_mtime(), time_zone)
        si_atime = util.format_timestamp(attr_standard_information.get_atime(), time_zone)
        si_ctime = util.format_timestamp(attr_standard_information.get_ctime(), time_zone)
        si_etime = util.format_timestamp(attr_standard_information.get_etime(), time_zone)
        si_usn = attr_standard_information.get_usn()
    else:
        si_mtime = ''
        si_atime = ''
        si_ctime = ''
        si_etime = ''
        si_usn = ''

    fr_lsn = file_record.get_logfile_sequence_number()

    if file_record.is_in_use():
        fr_in_use = 'Y'
    else:
        fr_in_use = 'N'

    # A file-name index present on the record marks it as a directory.
    if file_record.get_flags() & MFT.FILE_FILE_NAME_INDEX_PRESENT > 0:
        fr_directory = 'Y'
    else:
        fr_directory = 'N'

    fr_number = str(MFT.EncodeFileRecordSegmentReference(file_record.get_master_file_table_number(), file_record.get_sequence_number()))

    # One 'File record' row per (path, $FILE_NAME) pair; a single
    # pathless row when no path could be built.
    if len(file_paths) > 0:
        for file_path, attr_file_name in file_paths:
            fn_mtime = util.format_timestamp(attr_file_name.get_mtime(), time_zone)
            fn_atime = util.format_timestamp(attr_file_name.get_atime(), time_zone)
            fn_ctime = util.format_timestamp(attr_file_name.get_ctime(), time_zone)
            fn_etime = util.format_timestamp(attr_file_name.get_etime(), time_zone)

            mft_list.append(info + ['File record', fr_number, fr_in_use, fr_directory, fr_lsn, file_path,
                            si_mtime, si_atime, si_ctime, si_etime, si_usn, fn_mtime, fn_atime, fn_ctime, fn_etime,
                            objid_time, file_size, ads_list, wsl_mtime, wsl_atime, wsl_chtime])
    else:
        mft_list.append(info + ['File record', fr_number, fr_in_use, fr_directory, fr_lsn, '',
                        si_mtime, si_atime, si_ctime, si_etime, si_usn, '', '', '', '', objid_time, file_size, ads_list,
                        wsl_mtime, wsl_atime, wsl_chtime])

    # Parse a file name index in this file record (if present).
    attr_index_root = None

    if file_record.get_flags() & MFT.FILE_FILE_NAME_INDEX_PRESENT > 0:
        for attribute in file_record.attributes():
            if type(attribute) is MFT.AttributeRecordResident:
                attribute_value = attribute.value_decoded()

                if type(attribute_value) is Attributes.IndexRoot:
                    if attribute_value.get_indexed_attribute_type_code() == Attributes.ATTR_TYPE_FILE_NAME:
                        attr_index_root = attribute_value
                        break

        if attr_index_root is not None:
            for index_entry in attr_index_root.index_entries():
                attr_file_name_raw = index_entry.get_attribute()
                if attr_file_name_raw is None:
                    continue

                attr_file_name = Attributes.FileName(attr_file_name_raw)

                # Index entries are children of this record, so prefix
                # them with this record's own (first) path.
                if len(file_paths) > 0:
                    dir_path = file_paths[0][0]
                    # '/.' is presumably the root directory — blank it
                    # so joins don't double the separator.
                    if dir_path == '/.':
                        dir_path = ''

                    file_path = MFT.PATH_SEPARATOR.join([dir_path, attr_file_name.get_file_name()])
                else:
                    file_path = MFT.PATH_SEPARATOR.join(['<Unknown>', attr_file_name.get_file_name()])

                fr_number = str(index_entry.get_file_reference())

                if attr_file_name.get_file_attributes() & Attributes.DUP_FILE_NAME_INDEX_PRESENT > 0:
                    fr_directory = 'Y'
                else:
                    fr_directory = 'N'

                fn_mtime = util.format_timestamp(attr_file_name.get_mtime(), time_zone)
                fn_atime = util.format_timestamp(attr_file_name.get_atime(), time_zone)
                fn_ctime = util.format_timestamp(attr_file_name.get_ctime(), time_zone)
                fn_etime = util.format_timestamp(attr_file_name.get_etime(), time_zone)

                file_size = attr_file_name.get_file_size()

                mft_list.append(info + ['Index record', fr_number, '?', fr_directory, '', file_path, '', '', '', '',
                                        '', fn_mtime, fn_atime, fn_ctime, fn_etime, '', file_size, '', '', '', ''])

    # Parse slack space in this file record (if present).
    for slack in file_record.slack():
        for attr_file_name in slack.carve():
            # Carved $FILE_NAME values are located via their parent
            # directory reference, resolved through the MFT.
            parent_directory_reference = attr_file_name.get_parent_directory()
            parent_fr_number, parent_fr_sequence = MFT.DecodeFileRecordSegmentReference(
                parent_directory_reference)

            try:
                parent_file_record = mft_file.get_file_record_by_number(parent_fr_number, parent_fr_sequence)
                parent_file_paths = mft_file.build_full_paths(parent_file_record)
            except MFT.MasterFileTableException:
                parent_file_path = None
            else:
                if len(parent_file_paths) > 0:
                    parent_file_path = parent_file_paths[0]
                else:
                    parent_file_path = None

            if parent_file_path is not None:
                if parent_file_path == '/.':
                    parent_file_path = ''

                file_path = MFT.PATH_SEPARATOR.join([parent_file_path, attr_file_name.get_file_name()])
            else:
                file_path = MFT.PATH_SEPARATOR.join(['<Unknown>', attr_file_name.get_file_name()])

            if attr_file_name.get_file_attributes() & Attributes.DUP_FILE_NAME_INDEX_PRESENT > 0:
                fr_directory = 'Y'
            else:
                fr_directory = 'N'

            fn_mtime = util.format_timestamp(attr_file_name.get_mtime(), time_zone)
            fn_atime = util.format_timestamp(attr_file_name.get_atime(), time_zone)
            fn_ctime = util.format_timestamp(attr_file_name.get_ctime(), time_zone)
            fn_etime = util.format_timestamp(attr_file_name.get_etime(), time_zone)

            file_size = attr_file_name.get_file_size()

            mft_list.append(info + ['Slack', '?', '?', fr_directory, '', file_path, '', '', '', '', '',
                                    fn_mtime, fn_atime, fn_ctime, fn_etime, '', file_size, '', '', '', ''])

    return mft_list