def Open(self):
    '''Opens the output writer.

    Deletes any pre-existing database file at self._path, then creates a
    fresh database containing the logs table.

    Returns:
      bool: True if successful or False on error.
    '''
    if os.path.exists(self._path):
        try:
            logger.info('Database already exists, trying to delete it.')
            os.remove(self._path)
        except (IOError, OSError):
            logger.exception('Unable to remove existing database at %s.', self._path)
            return False
    try:
        logger.info('Trying to create new database file at %s.', self._path)
        connection = sqlite3.connect(self._path)
    except sqlite3.Error:
        logger.exception('Failed to create database at %s', self._path)
        return False
    try:
        cursor = connection.cursor()
        cursor.execute(self._CREATE_LOGS_TABLE_QUERY)
        connection.commit()  # persist the DDL immediately
    except sqlite3.Error:
        logger.exception('Failed to create database at %s', self._path)
        # BUG FIX: close the connection on table-creation failure; the original
        # left self._connection set to a half-initialized connection even
        # though Open() reported failure.
        connection.close()
        return False
    self._connection = connection
    return True
def DecompressTraceV3Log(input_path, output_path):
    '''Decompresses the .traceV3 file at input_path, writing the result to output_path.

    Args:
      input_path (str): path to the compressed .traceV3 file.
      output_path (str): path where the uncompressed copy is written.

    Returns:
      bool: True if successful, False (falsy, as before) on error.
    '''
    try:
        with open(input_path, 'rb') as trace_file:
            with open(output_path, 'wb') as out_file:
                return UnifiedLogLib.DecompressTraceV3(trace_file, out_file)
    except Exception:
        # BUG FIX: was a bare `except:` with an empty log message — it swallowed
        # KeyboardInterrupt/SystemExit and logged no context at all.
        logger.exception('Failed to decompress %s', input_path)
        return False
def ParseFolder(self, uuidtext_folder_path):
    '''Parse the uuidtext folder specified and parse all dsc files,
    adding them to the cache.

    Args:
      uuidtext_folder_path (str): path to the uuidtext folder (which
        contains the 'dsc' subfolder).
    '''
    try:
        dsc_folder_path = self.vfs.path_join(uuidtext_folder_path, 'dsc')
        entries = self.vfs.listdir(dsc_folder_path)
        for dsc_name in entries:
            if len(dsc_name) == 32:  # filter: dsc files are named by 32-char UUID hex
                # BUG FIX: the original reused the `dsc_path` variable for both
                # the folder path and the virtual-file object, so every dsc
                # file after the first was path_join'ed against a file object
                # instead of the folder path.
                dsc_file =_obj = self.vfs.get_virtual_file(
                    self.vfs.path_join(dsc_folder_path, dsc_name), 'Dsc')
                dsc = dsc_file.Dsc(dsc_file_obj)
                dsc.Parse()
                self.cached_dsc[dsc_name] = dsc
        # NOTE: uuidtext files are intentionally NOT pre-cached here — opening
        # all of them at once exhausts the process's open-file limit, so they
        # are loaded lazily elsewhere.
    except Exception:
        logger.exception('Error parsing uuidtext folder %s', uuidtext_folder_path)
def Close(self):
    '''Closes the unified logs reader, releasing the database connection.'''
    connection = self._connection
    if connection:
        try:
            connection.close()
        except sqlite3.Error:
            logger.exception('Unable to close database')
        self._connection = None
    self._path = None
def ReadTimesyncFile(buffer, ts_list):
    '''Parses a .timesync file's contents, appending Timesync entries to ts_list.

    Args:
      buffer (bytes): the entire content of a .timesync file.
      ts_list (list): list of resources.Timesync objects, extended in place.
    '''
    try:
        pos = 0
        size = len(buffer)
        while pos < size:
            # Timesync boot header: sig(H) header_size(H) unk1(I), then the
            # 16-byte boot uuid and <IIqiI numer/denom/timestamp/tz/is_dst.
            sig, header_size, unk1 = struct.unpack("<HHI", buffer[pos:pos + 8])
            if sig != 0xBBB0:
                # BUG FIX: message previously printed 0x0030BBB0 as the
                # expected value while the check compares the 16-bit 0xBBB0.
                logger.error(
                    "not the right signature for Timesync header, got 0x{:04X} instead of 0x{:04X}, pos was 0x{:08X}"
                    .format(sig, 0xBBB0, pos))
                break
            uuid = UUID(bytes=buffer[pos + 8:pos + 24])
            ts_numer, ts_denom, t_stamp, tz, is_dst = struct.unpack(
                "<IIqiI", buffer[pos + 24:pos + 48])
            ts_header = resources.TimesyncHeader(sig, unk1, uuid, ts_numer,
                                                 ts_denom, t_stamp, tz, is_dst)
            pos += header_size  # 0x30 (48) by default
            if header_size != 0x30:
                # BUG FIX: was logging `size` (the whole buffer length)
                # instead of the unexpected header_size.
                logger.info(
                    "Timesync header was 0x{:X} bytes instead of 0x30(48) bytes!"
                    .format(header_size))
            logger.debug("TIMEHEAD {} 0x{:016X} {} {}".format(
                uuid, t_stamp, ReadAPFSTime(t_stamp), 'boot'))
            # TODO - TEST search ts_list for existing, not seen so far
            existing_ts = None
            for ts in ts_list:
                if ts.header.boot_uuid == uuid:
                    existing_ts = ts
                    break
            if existing_ts:
                ts_obj = existing_ts
            else:
                ts_obj = resources.Timesync(ts_header)
                ts_list.append(ts_obj)
                # Adding header timestamp as Ts type too with cont_time = 0
                timesync_item = resources.TimesyncItem(0, 0, t_stamp, tz, is_dst)
                ts_obj.items.append(timesync_item)
            # Consume the 32-byte 'Ts' records that follow this header.
            while pos < size:
                if buffer[pos:pos + 4] == b'Ts \x00':
                    ts_unknown, cont_time, t_stamp, bias, is_dst = struct.unpack(
                        "<IqqiI", buffer[pos + 4:pos + 32])
                    timesync_item = resources.TimesyncItem(
                        ts_unknown, cont_time, t_stamp, bias, is_dst)
                    ts_obj.items.append(timesync_item)
                    logger.debug("TIMESYNC {} 0x{:016X} {} {}".format(
                        uuid, t_stamp, ReadAPFSTime(t_stamp), ts_unknown))
                else:
                    break  # break this loop, parse as header
                pos += 32
    except Exception:
        logger.exception("Exception reading TimesyncFile")
def open(self, mode='rb'):
    '''Opens a file for reading/writing, returns file pointer or None.

    On failure self.is_valid is set to False; if the failure was a missing
    file, self.file_not_found is also set to True.
    '''
    try:
        logger.debug('Trying to read {} file {}'.format(self.file_type, self.path))
        self.file_pointer = open(self.path, mode)
        return self.file_pointer
    except Exception as ex:
        # BUG FIX: detect "file not found" via errno instead of substring
        # matching the exception text ('No such file'), which is
        # platform- and locale-dependent.
        import errno
        if getattr(ex, 'errno', None) == errno.ENOENT:
            logger.error('Failed to open as file not found {}'.format(self.path))
            self.file_not_found = True
        else:
            logger.exception('Failed to open file {}'.format(self.path))
        self.is_valid = False
    return None
def DecompressTraceV3(trace_file, out_file):
    '''
    Creates an uncompressed version of the .traceV3 file.

    Walks the file chunk by chunk; each chunk starts with a 16-byte header
    (4-byte tag, 4 bytes of flags/unknown, 8-byte little-endian data length
    at offset 8), followed by the chunk data padded to an 8-byte boundary.

    Input parameters:
      trace_file = file pointer to .traceV3 file (opened as 'rb')
      out_file   = file pointer to blank file (opened as 'wb')
    Returns True/False
    '''
    try:
        index = 0
        tag = trace_file.read(4)
        while tag:
            begin_pos = trace_file.tell() - 4
            # Data length lives at offset 8 of the chunk header.
            trace_file.seek(begin_pos + 8)
            struct_len = struct.unpack('<Q', trace_file.read(8))[0]
            # NOTE(review): [::-1] reverses the hexlify output char-by-char,
            # not byte-pair-wise, so the logged tag is not a true byte-swap.
            logger.debug("index={} pos=0x{:X} tag=0x{}".format(
                index, begin_pos, binascii.hexlify(tag)[::-1]))
            trace_file.seek(begin_pos)
            chunk_data_incl_header = trace_file.read(16 + struct_len)
            if tag == b'\x00\x10\x00\x00':  # header
                out_file.write(chunk_data_incl_header
                               )  # boot_uuid header, write to output directly
            # NOTE: tag[0] yields an int only on Python 3 bytes objects.
            elif tag[0] == 0x0B:
                out_file.write(chunk_data_incl_header
                               )  # uncompressed, write to output directly
            elif tag[0] == 0x0D:
                # Compressed chunk: decompress payload, keep the first 8 header
                # bytes, and rewrite the length field with the new size.
                uncompressed = DecompressChunkData(chunk_data_incl_header[16:],
                                                   struct_len)
                out_file.write(chunk_data_incl_header[0:8])  # Same Header !
                out_file.write(struct.pack('<Q', len(uncompressed)))  # New size
                out_file.write(uncompressed)
            else:
                # Unknown chunk types are copied through unchanged.
                logger.error('Unknown chunk tag value encountered : {}'.format(
                    binascii.hexlify(tag)))
                out_file.write(chunk_data_incl_header)
            if struct_len % 8:  # Go to QWORD boundary
                struct_len += 8 - (struct_len % 8)
            if out_file.tell() % 8:  # Go to QWORD boundary on output
                out_file.write(
                    b'\x00\x00\x00\x00\x00\x00\x00'[0:(8 - out_file.tell() % 8)])
            trace_file.seek(begin_pos + 16 + struct_len)
            tag = trace_file.read(4)
            index += 1
    except (ValueError, lz4.block.LZ4BlockError) as ex:
        # Decompression failures (bad lz4 block / bad sizes) abort the copy.
        logger.exception('')
        return False
    return True
def _ReadCString(self, data, max_len=1024): '''Returns a C utf8 string (excluding terminating null)''' pos = 0 max_len = min(len(data), max_len) string = '' try: null_pos = data.find(b'\x00', 0, max_len) if null_pos == -1: logger.warning("Possible corrupted string encountered") string = data.decode('utf8') else: string = data[0:null_pos].decode('utf8') except: logger.exception('Error reading C-String') return string
def WriteLogEntry(self, log):
    '''Writes a Unified Log entry.

    Args:
      log (???): log entry:
    '''
    if not self._connection:
        return
    # Normalize the timestamp and the two UUID-ish columns to storable forms.
    log[3] = UnifiedLogLib.ReadAPFSTime(log[3])
    log[18] = '{0!s}'.format(log[18])
    log[19] = '{0!s}'.format(log[19])
    # TODO: cache queries to use executemany
    try:
        self._connection.cursor().execute(self._INSERT_LOGS_VALUES_QUERY, log)
    except sqlite3.Error:
        logger.exception('Error inserting data into database')
def WriteLogEntry(self, log):
    '''Writes a Unified Log entry to the text output file.

    Mutates log in place: log[3] is converted from an APFS timestamp, and in
    TSV_ALL mode log[18]/log[19] are upper-cased strings.

    Args:
      log (???): log entry:
    '''
    if self._file_object:
        log[3] = UnifiedLogLib.ReadAPFSTime(log[3])
        try:
            if self._mode == 'TSV_ALL':
                # Full tab-separated row with every column.
                log[18] = '{0!s}'.format(log[18]).upper()
                log[19] = '{0!s}'.format(log[19]).upper()
                # NOTE(review): this format string contains 22 placeholders
                # but 23 args are supplied — log[22] is silently dropped by
                # str.format. Verify against the _HEADER_ALL column count.
                self._file_object.write(
                    ('{}\t0x{:X}\t{}\t{}\t0x{:X}\t{}\t0x{:X}\t0x{:X}\t{}\t'
                     '{}\t{}\t({})\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t'
                     '{}\n').format(log[0], log[1], log[2], log[3], log[4],
                                    log[5], log[6], log[7], log[8], log[9],
                                    log[10], log[11], log[12], log[13],
                                    log[14], log[15], log[16], log[17],
                                    log[18], log[19], log[20], log[21],
                                    log[22]))
            else:
                # Default mode: condensed human-readable line built from the
                # signpost, subsystem/category and message columns.
                signpost = ''  # (log[15] + ':') if log[15] else ''
                if log[15]:
                    signpost += '[' + log[16] + ']'
                msg = (signpost + ' ') if signpost else ''
                msg += log[11] + ' ' + (
                    ('(' + log[12] + ') ') if log[12] else '')
                if len(log[13]) or len(log[14]):
                    msg += '[' + log[13] + ':' + log[14] + '] '
                msg += log[22]
                # Newlines in the message are flattened so each entry stays
                # on one output line.
                self._file_object.write(
                    ('{time} {li[4]:<#10x} {li[5]:11} {li[6]:<#20x} '
                     '{li[8]:<6} {li[10]:<4} {message}\n').format(
                         li=log, time=log[3], message=msg.replace('\n', ',')))
        except (IOError, OSError):
            logger.exception('Error writing to output file')
def _ReadCStringAndEndPos(self, data, max_len=1024): '''Returns a tuple containing a C utf8 string (excluding terminating null) and the end position in the data ("utf8-string", pos) ''' pos = 0 max_len = min(len(data), max_len) string = '' null_pos = -1 try: null_pos = data.find(b'\x00', 0, max_len) if null_pos == -1: logger.warning("Possible corrupted string encountered") string = data.decode('utf8') else: string = data[0:null_pos].decode('utf8') except: logger.exception('Error reading C-String') return string, null_pos
def ReadTimesyncFolder(path, ts_list, vfs):
    '''Reads files in the timesync folder specified by 'path' and populates
    ts_list with timesync entries.
    vfs = VirtualFileSystem object
    '''
    try:
        entries = vfs.listdir(path)
        # Sort the files by name, so continuous time will be sequential
        # automatically.
        for entry in sorted(entries):
            if entry.endswith(".timesync"):
                file_path = vfs.path_join(path, entry)
                logger.debug('Trying to read timesync file {}'.format(file_path))
                f = vfs.get_virtual_file(file_path, 'TimeSync').open()
                if f:
                    buffer = f.read()  # should be a fairly small file!
                    ReadTimesyncFile(buffer, ts_list)
                    f.close()
            else:
                logger.error("In Timesync folder, found non-ts file {}".format(entry))
    except Exception:
        # BUG FIX: was logger.exception('') — an empty message loses all
        # context about what failed.
        logger.exception('Error reading timesync folder %s', path)
def Parse(self):
    '''Parses a dsc file.

    self._file.is_valid is set to False if this method encounters issues
    parsing the file.

    Returns:
      bool: True if the dsc file-like object was successfully parsed,
          False otherwise.
    '''
    dsc_file_obj = self._file.open()
    if not dsc_file_obj:
        return False
    parsed_ok = False
    try:
        parsed_ok = self._ParseFileObject(dsc_file_obj)
    except (IOError, OSError, struct.error):
        logger.exception('DSC Parser error')
    if not parsed_ok:
        self._file.is_valid = False
    return parsed_ok
def Open(self):
    '''Opens the output writer and writes the column header.

    Returns:
      bool: True if successful or False on error.
    '''
    logger.info('Creating output file %s', self._path)
    try:
        # io.open() is portable between Python 2 and 3; text mode means we
        # don't have to care about end-of-line characters.
        self._file_object = io.open(self._path, 'wt', encoding='utf-8')
    except (IOError, OSError):
        logger.exception('Failed to open file %s', self._path)
        return False
    try:
        if self._mode == 'TSV_ALL':
            self._file_object.write(self._HEADER_ALL)
        else:
            self._file_object.write(self._HEADER_DEFAULT)
    except (IOError, OSError):
        logger.exception('Error writing to output file')
        # BUG FIX: close the handle on header-write failure; the original
        # returned False but left self._file_object open (handle leak).
        self._file_object.close()
        self._file_object = None
        return False
    return True