def Open(self):
    '''Opens the output writer.

    Creates a fresh SQLite database at self._path with the logs table.

    Returns:
        bool: True if the database was created, False on error.
    '''
    # Start from a clean slate: remove any database left over from a
    # previous run before creating the new one.
    if os.path.exists(self._path):
        logger.info('Database already exists, trying to delete it.')
        try:
            os.remove(self._path)
        except (IOError, OSError):
            logger.exception(
                'Unable to remove existing database at %s.', self._path)
            return False

    logger.info('Trying to create new database file at %s.', self._path)
    try:
        self._connection = sqlite3.connect(self._path)
        self._connection.cursor().execute(self._CREATE_LOGS_TABLE_QUERY)
    except sqlite3.Error:
        logger.exception('Failed to create database at %s', self._path)
        return False

    return True
def _ParseFileObject(self, file_object):
    '''Parses a dsc file-like object.

    Args:
        file_object (file): file-like object.

    Returns:
        bool: True if the dsc file-like object was successfully parsed,
            False otherwise.

    Raises:
        IOError: if the dsc file cannot be parsed.
        OSError: if the dsc file cannot be parsed.
        struct.error: if the dsc file cannot be parsed.
    '''
    file_header_data = file_object.read(16)
    if file_header_data[0:4] != b'hcsd':
        # BUG FIX: hexlify() returns bytes on Python 3; decode so the log
        # message reads "0x68637364" instead of "0xb'68637364'".
        signature_base16 = binascii.hexlify(
            file_header_data[0:4]).decode('ascii')
        logger.info(
            ('Wrong signature in DSC file, got 0x{} instead of 0x68637364 '
             '(hcsd)').format(signature_base16))
        return False

    major_version, minor_version, num_range_entries, num_uuid_entries = (
        struct.unpack("<HHII", file_header_data[4:16]))
    self._format_version = '{0:d}.{1:d}'.format(major_version, minor_version)

    # Range entries are fixed-size records immediately after the header.
    while len(self.range_entries) < num_range_entries:
        range_entry_data = file_object.read(16)
        uuid_index, v_off, data_offset, data_len = struct.unpack(
            "<IIII", range_entry_data)
        range_entry = [uuid_index, v_off, data_offset, data_len]
        self.range_entries.append(range_entry)

    # UUID entries reference a path elsewhere in the file, so remember the
    # entry offset and seek back after reading each path.
    uuid_entry_offset = file_object.tell()
    while len(self.uuid_entries) < num_uuid_entries:
        file_object.seek(uuid_entry_offset, os.SEEK_SET)
        uuid_entry_data = file_object.read(28)
        uuid_entry_offset += 28
        v_off, size = struct.unpack("<II", uuid_entry_data[:8])
        uuid_object = UUID(bytes=uuid_entry_data[8:24])
        data_offset = struct.unpack("<I", uuid_entry_data[24:])[0]
        file_object.seek(data_offset, os.SEEK_SET)
        path_data = file_object.read(1024)  # File path should not be >1024
        lib_path = self._ReadCString(path_data)
        lib_name = posixpath.basename(lib_path)
        self.uuid_entries.append(
            [v_off, size, uuid_object, lib_path, lib_name])

    return True
def ReadTimesyncFile(buffer, ts_list):
    '''Parses a timesync file buffer and appends Timesync objects to ts_list.

    Args:
        buffer (bytes): raw content of a timesync file.
        ts_list (list): list of resources.Timesync objects, updated in place.
            Boot headers already present (matched by boot_uuid) are reused.
    '''
    try:
        pos = 0
        size = len(buffer)
        while pos < size:
            sig, header_size, unk1 = struct.unpack("<HHI", buffer[pos:pos + 8])
            if sig != 0xBBB0:
                logger.error(
                    "not the right signature for Timesync header, got 0x{:04X} instead of 0x{:04X}, pos was 0x{:08X}"
                    .format(sig, 0x0030BBB0, pos))
                break
            uuid = UUID(bytes=buffer[pos + 8:pos + 24])
            ts_numer, ts_denom, t_stamp, tz, is_dst = struct.unpack(
                "<IIqiI", buffer[pos + 24:pos + 48])
            ts_header = resources.TimesyncHeader(
                sig, unk1, uuid, ts_numer, ts_denom, t_stamp, tz, is_dst)
            pos += header_size  # 0x30 (48) by default
            if header_size != 0x30:
                # BUG FIX: log the unexpected header size, not the total
                # buffer size (the original formatted `size` here).
                logger.info(
                    "Timesync header was 0x{:X} bytes instead of 0x30(48) bytes!"
                    .format(header_size))
            logger.debug("TIMEHEAD {} 0x{:016X} {} {}".format(
                uuid, t_stamp, ReadAPFSTime(t_stamp), 'boot'))
            #TODO - TEST search ts_list for existing, not seen so far
            existing_ts = None
            for ts in ts_list:
                if ts.header.boot_uuid == uuid:
                    existing_ts = ts
                    break
            if existing_ts:
                ts_obj = existing_ts
            else:
                ts_obj = resources.Timesync(ts_header)
                ts_list.append(ts_obj)
                # Adding header timestamp as Ts type too with cont_time = 0
                timesync_item = resources.TimesyncItem(0, 0, t_stamp, tz, is_dst)
                ts_obj.items.append(timesync_item)
            # Read consecutive 'Ts' records until the next boot header.
            while pos < size:
                if buffer[pos:pos + 4] == b'Ts \x00':
                    ts_unknown, cont_time, t_stamp, bias, is_dst = struct.unpack(
                        "<IqqiI", buffer[pos + 4:pos + 32])
                    timesync_item = resources.TimesyncItem(
                        ts_unknown, cont_time, t_stamp, bias, is_dst)
                    ts_obj.items.append(timesync_item)
                    logger.debug("TIMESYNC {} 0x{:016X} {} {}".format(
                        uuid, t_stamp, ReadAPFSTime(t_stamp), ts_unknown))
                else:
                    break  # break this loop, parse as header
                pos += 32
    except Exception:
        logger.exception("Exception reading TimesyncFile")
def _ParseFileObject(self, file_object):
    '''Parses an uuidtext file-like object.

    Args:
        file_object (file): file-like object.

    Returns:
        bool: True if the uuidtext file-like object was successfully parsed,
            False otherwise.

    Raises:
        IOError: if the uuidtext file cannot be parsed.
        OSError: if the uuidtext file cannot be parsed.
        struct.error: if the uuidtext file cannot be parsed.
    '''
    file_header_data = file_object.read(16)
    if file_header_data[0:4] != b'\x99\x88\x77\x66':
        # BUG FIX: hexlify() returns bytes on Python 3; decode so the log
        # message reads "0x99887766" instead of "0xb'99887766'".
        signature_base16 = binascii.hexlify(
            file_header_data[0:4]).decode('ascii')
        logger.info(
            ('Wrong signature in uuidtext file, got 0x{} instead of '
             '0x99887766').format(signature_base16))
        return False

    # Note that the flag1 and flag2 are not used.
    flag1, flag2, num_entries = struct.unpack("<III", file_header_data[4:16])

    entries_data_size = 8 * num_entries
    entries_data = file_object.read(entries_data_size)

    entry_offset = 0
    # Entry data blobs are packed back-to-back after the entries table.
    data_offset = 16 + entries_data_size
    while len(self._entries) < num_entries:
        entry_end_offset = entry_offset + 8
        range_start_offset, data_len = struct.unpack(
            "<II", entries_data[entry_offset:entry_end_offset])
        entry_offset = entry_end_offset
        entry_tuple = (range_start_offset, data_offset, data_len)
        self._entries.append(entry_tuple)
        data_offset += data_len

    # The library path follows the last entry's data blob.
    file_object.seek(data_offset, os.SEEK_SET)
    library_path_data = file_object.read(1024)
    self.library_path = self._ReadCString(library_path_data)
    self.library_name = posixpath.basename(self.library_path)

    return True
def _ReadTraceV3Folder(self, tracev3_path, output_writer):
    '''Recursively reads all the tracev3 files in the folder.

    Args:
        tracev3_path (str): path of the tracev3 folder.
        output_writer (OutputWriter): output writer.
    '''
    for entry_name in os.listdir(tracev3_path):
        entry_path = os.path.join(tracev3_path, entry_name)
        if os.path.isdir(entry_path):
            # Descend into sub-folders.
            self._ReadTraceV3Folder(entry_path, output_writer)
            continue
        # Only process .tracev3 files, skipping "._" prefixed entries
        # (metadata sidecar files).
        is_tracev3_file = entry_name.lower().endswith('.tracev3')
        if is_tracev3_file and not entry_name.startswith('._'):
            logger.info("Trying to read file - %s", entry_path)
            self._ReadTraceV3File(entry_path, output_writer)
def Open(self):
    '''Opens the output writer.

    Returns:
        bool: True if successful or False on error.
    '''
    logger.info('Creating output file %s', self._path)

    # io.open() is portable between Python 2 and 3; text mode means we do
    # not have to care about the end-of-line character.
    try:
        self._file_object = io.open(self._path, 'wt', encoding='utf-8')
    except (IOError, OSError):
        logger.exception('Failed to open file %s', self._path)
        return False

    header = (
        self._HEADER_ALL if self._mode == 'TSV_ALL'
        else self._HEADER_DEFAULT)
    try:
        self._file_object.write(header)
    except (IOError, OSError):
        logger.exception('Error writing to output file')
        return False

    return True
def Main():
    '''The main program function.

    Returns:
        bool: True if successful or False if not.
    '''
    description = (
        'UnifiedLogReader is a tool to read macOS Unified Logging tracev3 files.\n'
        'This is version {0:s} tested on macOS 10.12.5 - 10.15 and iOS 12.\n\n'
        'Notes:\n-----\n'
        'If you have a .logarchive, then point uuidtext_path to the .logarchive folder, \n'
        'the timesync folder is within the logarchive folder').format(
            UnifiedLog.__version__)

    arg_parser = argparse.ArgumentParser(
        description=description,
        formatter_class=argparse.RawTextHelpFormatter)
    arg_parser.add_argument(
        'uuidtext_path', help='Path to uuidtext folder (/var/db/uuidtext)')
    arg_parser.add_argument(
        'timesync_path',
        help='Path to timesync folder (/var/db/diagnostics/timesync)')
    arg_parser.add_argument(
        'tracev3_path',
        help='Path to either tracev3 file or folder to recurse (/var/db/diagnostics)')
    arg_parser.add_argument(
        'output_path', help='An existing folder where output will be saved')
    arg_parser.add_argument(
        '-f', '--output_format', action='store',
        choices=('SQLITE', 'TSV_ALL', 'LOG_DEFAULT'), metavar='FORMAT',
        default='LOG_DEFAULT',
        help=('Output format: SQLITE, TSV_ALL, LOG_DEFAULT '
              '(Default is LOG_DEFAULT)'),
        type=str.upper)
    arg_parser.add_argument(
        '-l', '--log_level',
        help='Log levels: INFO, DEBUG, WARNING, ERROR (Default is INFO)')

    args = arg_parser.parse_args()

    output_path = args.output_path.rstrip('\\/')
    uuidtext_folder_path = args.uuidtext_path.rstrip('\\/')
    timesync_folder_path = args.timesync_path.rstrip('\\/')
    tracev3_path = args.tracev3_path.rstrip('\\/')

    # BUG FIX: return False (not None) on error paths, matching the
    # documented bool return value.
    if not os.path.exists(uuidtext_folder_path):
        print('Exiting..UUIDTEXT Path not found {}'.format(
            uuidtext_folder_path))
        return False
    if not os.path.exists(timesync_folder_path):
        print('Exiting..TIMESYNC Path not found {}'.format(
            timesync_folder_path))
        return False
    if not os.path.exists(tracev3_path):
        print('Exiting..traceV3 Path not found {}'.format(tracev3_path))
        return False
    if not os.path.exists(output_path):
        print('Creating output folder {}'.format(output_path))
        os.makedirs(output_path)

    log_file_path = os.path.join(
        output_path, "Log." + time.strftime("%Y%m%d-%H%M%S") + ".txt")

    # Map the log level name to its logging constant; default is INFO.
    if args.log_level:
        args.log_level = args.log_level.upper()
        if args.log_level not in (
                'INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL'):
            print("Invalid input type for log level. Valid values are INFO, DEBUG, WARNING, ERROR")
            return False
        log_level = getattr(logging, args.log_level)
    else:
        log_level = logging.INFO

    log_console_handler = logging.StreamHandler()
    log_console_handler.setLevel(log_level)
    log_console_format = logging.Formatter('%(levelname)s - %(message)s')
    log_console_handler.setFormatter(log_console_format)
    logger.addHandler(log_console_handler)

    # Also write the log to a file in the output folder.
    log_file_handler = logging.FileHandler(log_file_path)
    log_file_handler.setFormatter(log_console_format)
    logger.addHandler(log_file_handler)
    logger.setLevel(log_level)

    unified_log_reader = UnifiedLogReader()

    if not unified_log_reader.ReadTimesyncFolder(timesync_folder_path):
        logger.error('Failed to get any timesync entries')
        return False

    if args.output_format == 'SQLITE':
        database_path = os.path.join(output_path, 'unifiedlogs.sqlite')
        output_writer = SQLiteDatabaseOutputWriter(database_path)
    else:
        # TSV_ALL or LOG_DEFAULT (argparse choices guarantee one of these).
        file_path = os.path.join(output_path, 'logs.txt')
        output_writer = FileOutputWriter(file_path, mode=args.output_format)

    if not output_writer.Open():
        return False

    time_processing_started = time.time()
    logger.info('Started processing')

    unified_log_reader.ReadDscFiles(uuidtext_folder_path)
    unified_log_reader.ReadTraceV3Files(tracev3_path, output_writer)

    output_writer.Close()

    time_processing_ended = time.time()
    run_time = time_processing_ended - time_processing_started
    logger.info("Finished in time = {}".format(
        time.strftime('%H:%M:%S', time.gmtime(run_time))))
    logger.info("{} Logs processed".format(
        unified_log_reader.total_logs_processed))
    logger.info("Review the Log file and report any ERRORs or EXCEPTIONS to the developers")
    return True
def Main():
    '''The main program function.

    Returns:
        bool: True if successful or False if not.
    '''
    description = (
        'UnifiedLogReader is a tool to read macOS Unified Logging tracev3 files.\n'
        'This is version {0:s} tested on macOS 10.12.5 - 10.15 and iOS 12.\n\n'
        'Notes:\n-----\n'
        'If you have a .logarchive, then point uuidtext_path to the .logarchive folder, \n'
        'the timesync folder is within the logarchive folder').format(
            UnifiedLog.__version__)

    arg_parser = argparse.ArgumentParser(
        description=description,
        formatter_class=argparse.RawTextHelpFormatter)
    arg_parser.add_argument('logarchive_path', help='Path to logarchive folder')
    arg_parser.add_argument(
        '--log', help='An existing folder where output will be saved',
        required=False)
    args = arg_parser.parse_args()

    logarchive_path = args.logarchive_path
    output_path = args.log

    # BUG FIX: actually stop (return False) when a required path is missing;
    # previously only the "Exiting.." message was printed and processing
    # continued with invalid paths.
    if not os.path.exists(logarchive_path):
        print('Exiting..LogArchive Path not found {}'.format(logarchive_path))
        return False

    timesync_folder_path = os.path.join(logarchive_path, "timesync")
    if not os.path.exists(timesync_folder_path):
        print(
            'Exiting..Timesync Path not found {}'.format(timesync_folder_path))
        return False

    tracev3_path = os.path.join(logarchive_path, "Persist")
    # BUG FIX: fall back to the logarchive root when the "Persist" folder is
    # missing; the original re-tested timesync_folder_path here.
    if not os.path.exists(tracev3_path):
        tracev3_path = logarchive_path

    # Only log to a file when an existing output folder was supplied.
    log_file_path = None
    if output_path and os.path.exists(output_path):
        log_file_path = os.path.join(
            output_path, "Log." + time.strftime("%Y%m%d-%H%M%S") + ".txt")

    if log_file_path:
        log_file_handler = logging.FileHandler(log_file_path)
        log_file_handler.setFormatter(
            logging.Formatter('%(levelname)s - %(message)s'))
        logger.addHandler(log_file_handler)
    logger.setLevel(logging.INFO)

    unified_log_reader = UnifiedLogReader()
    if not unified_log_reader.ReadTimesyncFolder(timesync_folder_path):
        logger.error('Failed to get any timesync entries')
        return False

    output_writer = None
    if output_path and os.path.exists(output_path):
        file_path = os.path.join(output_path, 'logs.txt')
        output_writer = FileOutputWriter(file_path)
        if not output_writer.Open():
            return False

    time_processing_started = time.time()
    logger.info('Started processing')

    unified_log_reader.ReadDscFiles(logarchive_path)
    unified_log_reader.ReadTraceV3Files(tracev3_path, output_writer)

    # BUG FIX: only close the writer when one was actually created; the
    # original tested output_path and could call Close() on None when the
    # supplied output folder did not exist.
    if output_writer:
        output_writer.Close()

    time_processing_ended = time.time()
    run_time = time_processing_ended - time_processing_started
    logger.info("Finished in time = {}".format(
        time.strftime('%H:%M:%S', time.gmtime(run_time))))
    logger.info("{} Logs processed".format(
        unified_log_reader.total_logs_processed))
    logger.info(
        "Review the Log file and report any ERRORs or EXCEPTIONS to the developers"
    )
    return True