Example #1
def GetSubSystemAndCategory(self, sc_id):
    sc = self.items.get(sc_id, None)
    if sc:
        return (sc[0], sc[1])
    # Not found!
    logger.error("Could not find subsystem_category_id={}".format(sc_id))
    return ('', '')
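A minimal usage sketch (data invented for illustration), assuming self.items maps each subsystem_category_id to a (subsystem, category) tuple:

    catalog.items = {42: ('com.apple.network', 'connection')}  # hypothetical entry
    catalog.GetSubSystemAndCategory(42)  # ('com.apple.network', 'connection')
    catalog.GetSubSystemAndCategory(99)  # logs an error, returns ('', '')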
Example #2
    def ReadFmtStringFromVirtualOffset(self, v_offset):
        '''Reads a format string for a specific virtual offset.

        Args:
          v_offset (int): virtual offset.

        Returns:
          str: a format string, '%s' if the 32-bit MSB (0x80000000) is set or
              '<compose failure [UUID]>' if the uuidtext file could not be
              parsed or there is no entry corresponding with the virtual offset.
        '''
        if not self._file.is_valid:
            # This is the value returned by the MacOS 'log' program if uuidtext
            # is not found.
            return '<compose failure [UUID]>'

        if v_offset & 0x80000000:
            return '%s'

        for range_start_offset, data_offset, data_len in self._entries:
            range_end_offset = range_start_offset + data_len
            if range_start_offset <= v_offset < range_end_offset:
                rel_offset = v_offset - range_start_offset

                file_object = self._file.file_pointer
                file_object.seek(data_offset + rel_offset)
                format_string_data = file_object.read(data_len - rel_offset)
                return self._ReadCString(format_string_data)

        # This is the value returned by the MacOS 'log' program if the uuidtext
        # entry is not found.
        logger.error('Invalid bounds 0x{0:X} for {1!s}'.format(
            v_offset, self.Uuid))
        return '<compose failure [UUID]>'
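The core of the lookup above is an interval search over (range_start_offset, data_offset, data_len) triples, mapping a virtual offset to a file offset. A self-contained sketch of just that arithmetic (entries invented):

    entries = [(0x1000, 0x40, 0x200), (0x2000, 0x240, 0x100)]  # invented triples

    def find_file_offset(v_offset):
        for range_start, data_offset, data_len in entries:
            if range_start <= v_offset < range_start + data_len:
                return data_offset + (v_offset - range_start)
        return None

    print(hex(find_file_offset(0x1010)))  # 0x50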
Example #3
def GetProcInfoById(self, id):
    for proc_info in self.ProcInfos:
        if proc_info.id == id:
            return proc_info
    # Not found!
    logger.error("ProcInfo with id={} not found".format(id))
    return None
Example #4
def DecompressChunkData(chunk_data, data_len):
    '''Decompress an individual compressed chunk (tag=0x600D)'''
    uncompressed = b''
    if chunk_data[0:4] in [b'bv41', b'bv4-']:
        last_uncompressed = b''
        comp_start = 0  # bv** offset
        comp_header = chunk_data[comp_start:comp_start + 4]
        while (data_len > comp_start) and (comp_header != b'bv4$'):
            if comp_header == b'bv41':
                uncompressed_size, compressed_size = struct.unpack(
                    '<II', chunk_data[comp_start + 4:comp_start + 12])
                last_uncompressed = lz4.block.decompress(
                    chunk_data[comp_start + 12:comp_start + 12 +
                               compressed_size],
                    uncompressed_size,
                    dict=last_uncompressed)
                comp_start += 12 + compressed_size
                uncompressed += last_uncompressed
            elif comp_header == b'bv4-':
                uncompressed_size = struct.unpack(
                    '<I', chunk_data[comp_start + 4:comp_start + 8])[0]
                uncompressed += chunk_data[comp_start + 8:comp_start + 8 +
                                           uncompressed_size]
                comp_start += 8 + uncompressed_size
            else:
                logger.error(
                    'Unknown compression value {} @ 0x{:X} - {}'.format(
                        binascii.hexlify(comp_header), comp_start,
                        comp_header))
                break
            comp_header = chunk_data[comp_start:comp_start + 4]
    else:
        logger.error('Unknown compression type {}'.format(
            binascii.hexlify(chunk_data[0:4])))
    return uncompressed
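Each 'bv41' block is a raw lz4 block body prefixed by its uncompressed and compressed sizes, and the stream is terminated by 'bv4$'. A round-trip sketch with the python-lz4 block API (payload invented):

    import struct
    import lz4.block

    payload = b'A' * 64                                   # invented test data
    comp = lz4.block.compress(payload, store_size=False)  # raw block, no size prefix
    block = b'bv41' + struct.pack('<II', len(payload), len(comp)) + comp + b'bv4$'
    assert DecompressChunkData(block, len(block)) == payload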
Example #5
def GetUuidEntryFromVirtualOffset(self, v_offset):
    '''Returns the uuid_entry whose [v_off, v_off + size) range contains v_offset'''
    for b in self.uuid_entries:
        if (b[0] <= v_offset) and ((b[0] + b[1]) > v_offset):
            return b
    # Not found
    logger.error('Failed to find uuid_entry for v_offset 0x{:X} in Dsc!'.format(v_offset))
    return None
Example #6
def ReadAPFSTime(mac_apfs_time):
    '''Converts a Mac APFS timestamp (nanoseconds since 1970-01-01) to a
       datetime object, or returns an empty string upon error'''
    if mac_apfs_time not in (0, None, ''):
        try:
            if isinstance(mac_apfs_time, str):
                mac_apfs_time = float(mac_apfs_time)
            return datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=mac_apfs_time / 1000000000.)
        except Exception as ex:
            logger.error("ReadAPFSTime() Failed to convert timestamp from value {} Error was: {}".format(mac_apfs_time, ex))
    return ''
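A quick sanity check of the conversion (values chosen by hand): 1483228800 seconds after the epoch is 2017-01-01 00:00:00 UTC, so the nanosecond input is that times 10**9:

    print(ReadAPFSTime(1483228800 * 10**9))    # 2017-01-01 00:00:00
    print(ReadAPFSTime('1483228800000000000')) # string input is converted too
    print(ReadAPFSTime(0))                     # '' (0 is treated as no timestamp)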
Example #7
def FindVirtualOffsetEntries(self, v_offset):
    '''Returns tuple (range_entry, uuid_entry) for the range entry that contains v_offset'''
    for a in self.range_entries:
        if (a[1] <= v_offset) and ((a[1] + a[3]) > v_offset):
            return (a, self.uuid_entries[a[0]])
    # Not found
    logger.error('Failed to find v_offset 0x{:X} in Dsc!'.format(v_offset))
    return (None, None)
Example #8
def ReadTimesyncFile(buffer, ts_list):
    try:
        pos = 0
        size = len(buffer)
        while pos < size:
            sig, header_size, unk1 = struct.unpack("<HHI", buffer[pos:pos + 8])
            if sig != 0xBBB0:
                logger.error(
                    "not the right signature for Timesync header, got 0x{:04X} instead of 0xBBB0, pos was 0x{:08X}"
                    .format(sig, pos))
                break
            uuid = UUID(bytes=buffer[pos + 8:pos + 24])
            ts_numer, ts_denom, t_stamp, tz, is_dst = struct.unpack(
                "<IIqiI", buffer[pos + 24:pos + 48])
            ts_header = resources.TimesyncHeader(sig, unk1, uuid, ts_numer,
                                                 ts_denom, t_stamp, tz, is_dst)
            pos += header_size  # 0x30 (48) by default
            if header_size != 0x30:
                logger.info(
                    "Timesync header was 0x{:X} bytes instead of 0x30(48) bytes!"
                    .format(header_size))
            logger.debug("TIMEHEAD {}  0x{:016X}  {} {}".format(
                uuid, t_stamp, ReadAPFSTime(t_stamp), 'boot'))
            #TODO - TEST search ts_list for existing, not seen so far
            existing_ts = None
            for ts in ts_list:
                if ts.header.boot_uuid == uuid:
                    existing_ts = ts
                    break
            if existing_ts:
                ts_obj = existing_ts
            else:
                ts_obj = resources.Timesync(ts_header)
                ts_list.append(ts_obj)
                # Adding header timestamp as Ts type too with cont_time = 0
                timesync_item = resources.TimesyncItem(0, 0, t_stamp, tz,
                                                       is_dst)
                ts_obj.items.append(timesync_item)
            while pos < size:
                if buffer[pos:pos + 4] == b'Ts \x00':
                    ts_unknown, cont_time, t_stamp, bias, is_dst = struct.unpack(
                        "<IqqiI", buffer[pos + 4:pos + 32])
                    timesync_item = resources.TimesyncItem(
                        ts_unknown, cont_time, t_stamp, bias, is_dst)
                    ts_obj.items.append(timesync_item)
                    logger.debug("TIMESYNC {}  0x{:016X}  {} {}".format(
                        uuid, t_stamp, ReadAPFSTime(t_stamp), ts_unknown))
                else:
                    break  # break this loop, parse as header
                pos += 32
    except Exception:
        logger.exception("Exception reading TimesyncFile")
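The fixed 48-byte header layout ("<HHI" + 16-byte UUID + "<IIqiI") can be checked against synthetic bytes (all values invented):

    import struct
    from uuid import UUID

    hdr = (struct.pack('<HHI', 0xBBB0, 0x30, 0)  # sig, header_size, unk1
           + b'\x00' * 16                        # boot UUID
           + struct.pack('<IIqiI', 1, 1, 1483228800 * 10**9, -28800, 0))
    sig, header_size, unk1 = struct.unpack('<HHI', hdr[0:8])
    assert (sig, header_size, len(hdr)) == (0xBBB0, 0x30, 48)
    boot_uuid = UUID(bytes=hdr[8:24])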
Example #9
def open(self, mode='rb'):
    '''Opens a file for reading/writing, returns file pointer or None'''
    try:
        logger.debug('Trying to read {} file {}'.format(self.file_type, self.path))
        self.file_pointer = open(self.path, mode)
        return self.file_pointer
    except Exception as ex:
        if str(ex).find('No such file') == -1:
            logger.exception('Failed to open file {}'.format(self.path))
        else:
            logger.error('Failed to open as file not found {}'.format(self.path))
            self.file_not_found = True
        self.is_valid = False
    return None
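A hedged usage sketch of the open-then-check pattern (the wrapper class name and path are hypothetical; Example #12 below shows the same pattern via vfs.get_virtual_file(...).open()):

    vf = VirtualFile('/var/db/diagnostics/timesync/0000.timesync', 'TimeSync')  # hypothetical class
    f = vf.open()          # 'rb' by default
    if f:
        data = f.read()
        f.close()
    elif vf.file_not_found:
        pass               # missing file was logged; vf.is_valid is now False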
Example #10
def DecompressTraceV3(trace_file, out_file):
    ''' Creates an uncompressed version of the .traceV3 file.
        Input parameters:
        trace_file = file pointer to .traceV3 file (opened as 'rb')
        out_file   = file pointer to blank file (opened as 'wb')
        Returns True/False
    '''
    try:
        index = 0
        tag = trace_file.read(4)
        while tag:
            begin_pos = trace_file.tell() - 4
            trace_file.seek(begin_pos + 8)
            struct_len = struct.unpack('<Q', trace_file.read(8))[0]
            logger.debug("index={} pos=0x{:X} tag=0x{}".format(
                index, begin_pos,
                binascii.hexlify(tag[::-1])))

            trace_file.seek(begin_pos)
            chunk_data_incl_header = trace_file.read(16 + struct_len)
            if tag == b'\x00\x10\x00\x00':  # boot_uuid header, write to output directly
                out_file.write(chunk_data_incl_header)
            elif tag[0] == 0x0B:  # uncompressed, write to output directly
                out_file.write(chunk_data_incl_header)
            elif tag[0] == 0x0D:  # compressed chunk (tag=0x600D)
                uncompressed = DecompressChunkData(chunk_data_incl_header[16:],
                                                   struct_len)
                out_file.write(chunk_data_incl_header[0:8])  # same header!
                out_file.write(struct.pack('<Q', len(uncompressed)))  # new size
                out_file.write(uncompressed)
            else:
                logger.error('Unknown chunk tag value encountered : {}'.format(
                    binascii.hexlify(tag)))
                out_file.write(chunk_data_incl_header)
            if struct_len % 8:  # Go to QWORD boundary
                struct_len += 8 - (struct_len % 8)
            if out_file.tell() % 8:  # Go to QWORD boundary on output
                out_file.write(
                    b'\x00\x00\x00\x00\x00\x00\x00'[0:(8 -
                                                       out_file.tell() % 8)])
            trace_file.seek(begin_pos + 16 + struct_len)
            tag = trace_file.read(4)
            index += 1
    except (ValueError, lz4.block.LZ4BlockError):
        logger.exception('Error decompressing traceV3 file')
        return False
    return True
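Each chunk begins with a 16-byte header: a 4-byte tag, 4 bytes this code does not interpret, and a little-endian uint64 payload length, with payloads padded to an 8-byte boundary. A small sketch of the header parse and the padding rule (bytes invented):

    import struct

    header = b'\x0d\x60\x00\x00' + b'\x00\x00\x00\x00' + struct.pack('<Q', 0x1D)
    tag = header[0:4]                                  # 0x600D -> compressed chunk
    struct_len = struct.unpack('<Q', header[8:16])[0]  # 0x1D
    if struct_len % 8:                                 # advance to QWORD boundary
        struct_len += 8 - (struct_len % 8)
    assert struct_len == 0x20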
Example #11
    def _ReadNtSid(self, data):
        '''Reads a windows SID from its raw binary form'''
        size = len(data)
        if size < 8:
            logger.error('Not a windows sid')
            return ''
        rev = struct.unpack("<B", data[0:1])[0]
        num_sub_auth = struct.unpack("<B", data[1:2])[0]
        authority = struct.unpack(">I", data[4:8])[0]

        if size < (8 + (num_sub_auth * 4)):
            logger.error(
                'buffer too small or truncated - cant fit all sub_auth')
            return ''
        sub_authorities = struct.unpack('<{}I'.format(num_sub_auth),
                                        data[8:8 + 4 * num_sub_auth])
        sid = 'S-{}-{}-'.format(rev, authority) + '-'.join(
            [str(sa) for sa in sub_authorities])
        return sid
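With the bounds and slice arithmetic above, a well-known SID parses cleanly; here S-1-5-32-544 (BUILTIN\Administrators) in its 16-byte binary form:

    import struct
    raw = bytes([1, 2, 0, 0, 0, 0, 0, 5]) + struct.pack('<II', 32, 544)
    # rev=1, two sub-authorities, authority=5 (read big-endian from bytes 4..8)
    # _ReadNtSid(raw) -> 'S-1-5-32-544'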
Example #12
def ReadTimesyncFolder(path, ts_list, vfs):
    '''Reads files in the timesync folder specified by 'path' and populates ts_list 
       with timesync entries.
       vfs = VirtualFileSystem object
    '''
    try:
        entries = vfs.listdir(path)
        for entry in sorted(entries): # sort the files by name, so continuous time will be sequential automatically
            if entry.endswith(".timesync"):
                file_path = vfs.path_join(path, entry)
                logger.debug('Trying to read timesync file {}'.format(file_path))
                f = vfs.get_virtual_file(file_path, 'TimeSync').open()
                if f:
                    buffer = f.read() # should be a fairly small file!
                    ReadTimesyncFile(buffer, ts_list)
                    f.close()
            else:
                logger.error("In Timesync folder, found non-ts file {}".format(entry))
    except Exception:
        logger.exception('Error reading Timesync folder')
Example #13
def Main():
    '''The main program function.

    Returns:
      bool: True if successful or False if not.
    '''
    description = (
        'UnifiedLogReader is a tool to read macOS Unified Logging tracev3 files.\n'
        'This is version {0:s} tested on macOS 10.12.5 - 10.15 and iOS 12.\n\n'
        'Notes:\n-----\n'
        'If you have a .logarchive, then point uuidtext_path to the .logarchive folder, \n'
        'the timesync folder is within the logarchive folder').format(UnifiedLog.__version__)

    arg_parser = argparse.ArgumentParser(
        description=description, formatter_class=argparse.RawTextHelpFormatter)
    arg_parser.add_argument('uuidtext_path', help='Path to uuidtext folder (/var/db/uuidtext)')
    arg_parser.add_argument('timesync_path', help='Path to timesync folder (/var/db/diagnostics/timesync)')
    arg_parser.add_argument('tracev3_path', help='Path to either tracev3 file or folder to recurse (/var/db/diagnostics)')
    arg_parser.add_argument('output_path', help='An existing folder where output will be saved')

    arg_parser.add_argument(
         '-f', '--output_format', action='store', choices=(
             'SQLITE', 'TSV_ALL', 'LOG_DEFAULT'),
         metavar='FORMAT', default='LOG_DEFAULT', help=(
             'Output format: SQLITE, TSV_ALL, LOG_DEFAULT  (Default is LOG_DEFAULT)'), type=str.upper)

    arg_parser.add_argument('-l', '--log_level', help='Log levels: INFO, DEBUG, WARNING, ERROR, CRITICAL (Default is INFO)')

    args = arg_parser.parse_args()

    output_path = args.output_path.rstrip('\\/')
    uuidtext_folder_path = args.uuidtext_path.rstrip('\\/')
    timesync_folder_path = args.timesync_path.rstrip('\\/')
    tracev3_path = args.tracev3_path.rstrip('\\/')

    if not os.path.exists(uuidtext_folder_path):
        print('Exiting..UUIDTEXT Path not found {}'.format(uuidtext_folder_path))
        return False

    if not os.path.exists(timesync_folder_path):
        print('Exiting..TIMESYNC Path not found {}'.format(timesync_folder_path))
        return False

    if not os.path.exists(tracev3_path):
        print('Exiting..traceV3 Path not found {}'.format(tracev3_path))
        return False

    if not os.path.exists(output_path):
        print('Creating output folder {}'.format(output_path))
        os.makedirs(output_path)

    log_file_path = os.path.join(output_path, "Log." + time.strftime("%Y%m%d-%H%M%S") + ".txt")

    # log
    if args.log_level:
        args.log_level = args.log_level.upper()
        if not args.log_level in ['INFO','DEBUG','WARNING','ERROR','CRITICAL']:
            print("Invalid input type for log level. Valid values are INFO, DEBUG, WARNING, ERROR, CRITICAL")
            return False
        else:
            if args.log_level == "INFO": args.log_level = logging.INFO
            elif args.log_level == "DEBUG": args.log_level = logging.DEBUG
            elif args.log_level == "WARNING": args.log_level = logging.WARNING
            elif args.log_level == "ERROR": args.log_level = logging.ERROR
            elif args.log_level == "CRITICAL": args.log_level = logging.CRITICAL
    else:
        args.log_level = logging.INFO

    log_level = args.log_level
    log_console_handler = logging.StreamHandler()
    log_console_handler.setLevel(log_level)
    log_console_format = logging.Formatter('%(levelname)s - %(message)s')
    log_console_handler.setFormatter(log_console_format)
    logger.addHandler(log_console_handler)

    #log file
    log_file_handler = logging.FileHandler(log_file_path)
    log_file_handler.setFormatter(log_console_format)
    logger.addHandler(log_file_handler)
    logger.setLevel(log_level)

    unified_log_reader = UnifiedLogReader()

    if not unified_log_reader.ReadTimesyncFolder(timesync_folder_path):
        logger.error('Failed to get any timesync entries')
        return False

    if args.output_format == 'SQLITE':
        database_path = os.path.join(output_path, 'unifiedlogs.sqlite')
        output_writer = SQLiteDatabaseOutputWriter(database_path)

    elif args.output_format in ('TSV_ALL', 'LOG_DEFAULT'):
        file_path = os.path.join(output_path, 'logs.txt')
        output_writer = FileOutputWriter(
            file_path, mode=args.output_format)

    if not output_writer.Open():
        return False

    time_processing_started = time.time()
    logger.info('Started processing')

    unified_log_reader.ReadDscFiles(uuidtext_folder_path)
    unified_log_reader.ReadTraceV3Files(tracev3_path, output_writer)

    output_writer.Close()

    time_processing_ended = time.time()
    run_time = time_processing_ended - time_processing_started
    logger.info("Finished in time = {}".format(time.strftime('%H:%M:%S', time.gmtime(run_time))))
    logger.info("{} Logs processed".format(unified_log_reader.total_logs_processed))
    logger.info("Review the Log file and report any ERRORs or EXCEPTIONS to the developers")

    return True
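A hedged invocation sketch (script name and paths are illustrative; the arguments match the parser above):

    python UnifiedLogReader.py /var/db/uuidtext /var/db/diagnostics/timesync \
        /var/db/diagnostics ./output -f SQLITE -l DEBUG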
Example #14
def Main():
    description = (
        'UnifiedLogReader is a tool to read macOS Unified Logging tracev3 files.\n'
        'This is version {0:s} tested on macOS 10.12.5 - 10.15 and iOS 12.\n\n'
        'Notes:\n-----\n'
        'If you have a .logarchive, then point uuidtext_path to the .logarchive folder, \n'
        'the timesync folder is within the logarchive folder').format(
            UnifiedLog.__version__)

    arg_parser = argparse.ArgumentParser(
        description=description, formatter_class=argparse.RawTextHelpFormatter)
    arg_parser.add_argument('logarchive_path',
                            help='Path to logarchive folder')
    arg_parser.add_argument(
        '--log',
        help='An existing folder where output will be saved',
        required=False)
    args = arg_parser.parse_args()
    logarchive_path = args.logarchive_path
    output_path = args.log
    if not os.path.exists(logarchive_path):
        print('Exiting..LogArchive Path not found {}'.format(logarchive_path))
        return False
    timesync_folder_path = os.path.join(logarchive_path, "timesync")
    if not os.path.exists(timesync_folder_path):
        print(
            'Exiting..Timesync Path not found {}'.format(timesync_folder_path))
        return False
    tracev3_path = os.path.join(logarchive_path, "Persist")
    if not os.path.exists(tracev3_path):
        tracev3_path = logarchive_path
    log_file_path = None
    if output_path and os.path.exists(output_path):
        log_file_path = os.path.join(
            output_path, "Log." + time.strftime("%Y%m%d-%H%M%S") + ".txt")

    # log file
    if log_file_path:
        log_file_handler = logging.FileHandler(log_file_path)
        log_file_handler.setFormatter(
            logging.Formatter('%(levelname)s - %(message)s'))
        logger.addHandler(log_file_handler)
        logger.setLevel(logging.INFO)

    unified_log_reader = UnifiedLogReader()
    if not unified_log_reader.ReadTimesyncFolder(timesync_folder_path):
        logger.error('Failed to get any timesync entries')
        return False

    output_writer = None
    if output_path and os.path.exists(output_path):
        file_path = os.path.join(output_path, 'logs.txt')
        output_writer = FileOutputWriter(file_path)
        if not output_writer.Open():
            return False

    time_processing_started = time.time()
    logger.info('Started processing')

    unified_log_reader.ReadDscFiles(logarchive_path)
    unified_log_reader.ReadTraceV3Files(tracev3_path, output_writer)

    if output_writer:
        output_writer.Close()

    time_processing_ended = time.time()
    run_time = time_processing_ended - time_processing_started
    logger.info("Finished in time = {}".format(
        time.strftime('%H:%M:%S', time.gmtime(run_time))))
    logger.info("{} Logs processed".format(
        unified_log_reader.total_logs_processed))
    logger.info(
        "Review the Log file and report any ERRORs or EXCEPTIONS to the developers")

    return True